In [2]:
%matplotlib inline
import pandas as pd 
import numpy as np
from statsmodels.stats.multicomp import pairwise_tukeyhsd
from statsmodels.stats.multicomp import MultiComparison
from statsmodels.formula.api import ols
from scipy import stats
In [3]:
# Semicolon-separated export; ISO-8859-1 (Latin-1) is needed for the accented
# company/artist names (e.g. "Arte Francés").
data = pd.read_csv("playlists.csv", sep=";", encoding = "ISO-8859-1") 
In [4]:
data.describe(include="all")
Out[4]:
company playlist_sample namesfiles no artist song sampleratefiles totalsamplesfiles durationfiles bitratefiles ... chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12 attackslopefiles attackleapfiles
count 1782 1782.000000 1782 1782.000000 1782 1782 1782.0 1.782000e+03 1782.000000 1782.000000 ... 1782.000000 1782.000000 1782.000000 1782.000000 1782.000000 1782.000000 1782.000000 1782.000000 1782.000000 1782.000000
unique 6 NaN 515 NaN 353 443 NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
top Arte Francés NaN 21 - Galantis, Uffie - Spaceship (feat. Uffie)... NaN Satin Jackets Hula Hoop.mp3 ... NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
freq 441 NaN 6 NaN 51 12 NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
mean NaN 2.084175 NaN 17.116162 NaN NaN 44100.0 1.043632e+07 236.651237 252.336700 ... 0.332301 0.319191 0.265246 0.440462 0.549565 0.581967 0.477825 0.430522 15.804409 0.507503
std NaN 1.114796 NaN 11.837401 NaN NaN 0.0 3.227105e+06 73.176981 88.377597 ... 0.270616 0.263919 0.249612 0.290454 0.314771 0.323173 0.321646 0.295563 9.338659 0.247587
min NaN 1.000000 NaN 1.000000 NaN NaN 44100.0 5.965054e+06 135.262000 128.000000 ... 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.168304
25% NaN 1.000000 NaN 8.000000 NaN NaN 44100.0 8.353151e+06 189.413850 128.000000 ... 0.116591 0.109123 0.075378 0.204738 0.301961 0.316822 0.214680 0.203097 9.810711 0.285584
50% NaN 2.000000 NaN 15.500000 NaN NaN 44100.0 9.480378e+06 214.974562 320.000000 ... 0.262799 0.259887 0.183082 0.396861 0.520957 0.570088 0.418077 0.379737 14.833864 0.452456
75% NaN 3.000000 NaN 24.000000 NaN NaN 44100.0 1.146931e+07 260.075075 320.000000 ... 0.494897 0.469603 0.384485 0.642814 0.829318 0.918554 0.735384 0.616558 19.964413 0.730669
max NaN 5.000000 NaN 65.000000 NaN NaN 44100.0 2.843136e+07 644.702000 320.000000 ... 1.000000 1.000000 1.000000 1.000000 1.000000 1.000000 1.000000 1.000000 66.233620 0.999408

11 rows × 64 columns

Find the positive and negative songs of the selection process for every company.

In [5]:
companies = data['company'].unique()
by_company = [data[data.company == company] for company in companies]

# For each company: songs in the LAST sampled playlist are the "positives"
# (they survived the curation process); songs seen in an earlier playlist but
# absent from the last one are the "negatives".
positives = []
negatives = []
for data_com in by_company:
    data_com = data_com.sort_values('playlist_sample')
    # Highest playlist_sample value = the most recent snapshot.
    # (max() replaces the deprecated int(Series) call on tail(1).)
    last_pl = int(data_com['playlist_sample'].max())
    df_last_pl = data_com.query('playlist_sample == ' + str(last_pl))
    positives.append(df_last_pl)

    # Anti-join by (artist, song): keep earlier rows with no match in the
    # last playlist.  Rows are collected in a list and the frame built once:
    # DataFrame.append() in a loop was deprecated (removed in pandas 2.0)
    # and grows the frame quadratically.
    earlier = data_com[data_com.playlist_sample < last_pl]
    dropped_rows = []
    for index, row in earlier.iterrows():
        in_last = ((df_last_pl['artist'] == row['artist'])
                   & (df_last_pl['song'] == row['song'])).any()
        if not in_last:
            dropped_rows.append(row)
    pos_loc = pd.DataFrame(dropped_rows, columns=data_com.columns)
    # Mimic the old append(..., ignore_index=True) index.
    pos_loc = pos_loc.reset_index(drop=True)
    negatives.append(pos_loc)
In [6]:
# Label negatives 0 and positives 1, then stack them into one frame per
# company.  Work on explicit copies: assigning a new column directly on the
# slices stored in `negatives`/`positives` is what raised the
# SettingWithCopyWarning (and can silently fail to write).  pd.concat
# replaces the deprecated DataFrame.append; like append, it keeps the
# original row indices of both parts.
df_n_ps = []
for i in range(len(negatives)):
    neg = negatives[i].copy()
    pos = positives[i].copy()
    neg['chosen'] = 0
    pos['chosen'] = 1
    df_n_ps.append(pd.concat([neg, pos]))
D:\Usuarios\1144084318\AppData\Roaming\Python\Python37\site-packages\ipykernel_launcher.py:4: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  after removing the cwd from sys.path.

ANOVA

In [7]:
import warnings
import matplotlib.pyplot as plt
import math
import seaborn as sns
warnings.filterwarnings('ignore')
In [ ]:
alpha = 0.05
for df_n_p in df_n_ps:
    df_n_p = df_n_p.fillna(0)
    fig = plt.figure(figsize=(17, 200))
    i = 1
    # Numeric feature columns start at position 8; the last column is the
    # 'chosen' label, so it is excluded from the scan.
    for index in range(8, df_n_p.shape[1] - 1):
        name = df_n_p.columns.values[index]
        df_n_p[name] = df_n_p[name].astype('float64')
        mc = MultiComparison(df_n_p[name], df_n_p['chosen'])
        mc_results = mc.tukeyhsd()
        # data[1:][0][5] is the 'reject' flag of the single pairwise
        # comparison (chosen 0 vs 1): True when the means differ significantly.
        if mc_results._results_table.data[1:][0][5]:
            results = ols(name + ' ~ C(chosen)', data=df_n_p).fit()
            # ANOVA assumption checks: homogeneity of variances (Levene) and
            # normality of the OLS residuals (Shapiro-Wilk); [1] is the p-value.
            homogeneity_test = stats.levene(df_n_p[name][df_n_p['chosen'] == 0],
                                            df_n_p[name][df_n_p['chosen'] == 1])[1]
            normality_test = stats.shapiro(results.resid)[1]
            if homogeneity_test > alpha and normality_test > alpha:
                # BUG FIX: the subplot grid needs ceil((cols - 9) / 2) rows;
                # the original `shape[1] - 9/2` evaluated as cols - 4.5 due to
                # operator precedence.
                n_rows = math.ceil((df_n_p.shape[1] - 9) / 2)
                ax = fig.add_subplot(n_rows, 2, i)
                sns.kdeplot(df_n_p.loc[df_n_p.chosen == 0][name], shade=True, ax=ax)
                sns.kdeplot(df_n_p.loc[df_n_p.chosen == 1][name], shade=True, ax=ax)
                plt.title(df_n_p.iloc[0, 0].upper() + " " + name)
                plt.legend(['neg', 'pos'])
                i += 1
In [8]:
from collections import Counter


from sklearn.cluster import KMeans
from sklearn.metrics import confusion_matrix, accuracy_score, silhouette_samples, silhouette_score, calinski_harabaz_score
from sklearn import preprocessing
from sklearn.decomposition import PCA
In [9]:
# Cast the remaining object-typed numeric columns to float64 in every
# company frame so they can be scaled with the other features.
for i in range(len(companies)):
    for col in ('bitratefiles', 'pitchfiles', 'bestkeyfiles'):
        df_n_ps[i][col] = df_n_ps[i][col].astype('float64')
df_n_ps[0].info()
<class 'pandas.core.frame.DataFrame'>
Int64Index: 372 entries, 0 to 179
Data columns (total 65 columns):
company                 372 non-null object
playlist_sample         372 non-null object
namesfiles              372 non-null object
no                      372 non-null object
artist                  372 non-null object
song                    372 non-null object
sampleratefiles         372 non-null object
totalsamplesfiles       372 non-null object
durationfiles           372 non-null float64
bitratefiles            372 non-null float64
rmsfiles                372 non-null float64
rmsmedianfiles          372 non-null float64
lowenergyfiles          372 non-null float64
ASRfiles                372 non-null float64
beatspectrumfiles       372 non-null float64
eventdensityfiles       372 non-null float64
tempofiles              372 non-null float64
pulseclarityfiles       372 non-null float64
zerocrossfiles          372 non-null float64
rolloffsfiles           372 non-null float64
brightnessfiles         372 non-null float64
spreadfiles             372 non-null float64
centroidfiles           371 non-null float64
kurtosisfiles           372 non-null float64
flatnessfiles           372 non-null float64
entropyfiles            372 non-null float64
mfccfiles_1             372 non-null float64
mfccfiles_2             372 non-null float64
mfccfiles_3             372 non-null float64
mfccfiles_4             372 non-null float64
mfccfiles_5             372 non-null float64
mfccfiles_6             372 non-null float64
mfccfiles_7             372 non-null float64
mfccfiles_8             372 non-null float64
mfccfiles_9             372 non-null float64
mfccfiles_10            372 non-null float64
mfccfiles_11            372 non-null float64
mfccfiles_12            372 non-null float64
mfccfiles_13            372 non-null float64
pitchfiles              372 non-null float64
inharmonicityfiles      372 non-null float64
bestkeyfiles            372 non-null float64
keyclarityfiles         372 non-null float64
modalityfiles           372 non-null float64
tonalcentroidfiles_1    372 non-null float64
tonalcentroidfiles_2    372 non-null float64
tonalcentroidfiles_3    372 non-null float64
tonalcentroidfiles_4    372 non-null float64
tonalcentroidfiles_5    372 non-null float64
tonalcentroidfiles_6    372 non-null float64
chromagramfiles_1       372 non-null float64
chromagramfiles_2       372 non-null float64
chromagramfiles_3       372 non-null float64
chromagramfiles_4       372 non-null float64
chromagramfiles_5       372 non-null float64
chromagramfiles_6       372 non-null float64
chromagramfiles_7       372 non-null float64
chromagramfiles_8       372 non-null float64
chromagramfiles_9       372 non-null float64
chromagramfiles_10      372 non-null float64
chromagramfiles_11      372 non-null float64
chromagramfiles_12      372 non-null float64
attackslopefiles        372 non-null float64
attackleapfiles         372 non-null float64
chosen                  372 non-null int64
dtypes: float64(56), int64(1), object(8)
memory usage: 191.8+ KB

We replace the NaN values and then normalize the data so that all variables carry the same weight. Only the numeric columns are considered.

In [10]:
# Replace NaNs with 0, then z-score the numeric block (columns 8 onward) so
# every feature carries equal weight.
# NOTE(review): this also scales the 'chosen' label column; later cells slice
# the MFCC features back out before modelling, so the label never enters X.
df_n_ps_std = [None] * len(companies)
for i in range(len(companies)):
    df_n_ps[i] = df_n_ps[i].fillna(0)
    numeric_part = df_n_ps[i].iloc[:, 8:]
    df_n_ps_std[i] = pd.DataFrame(preprocessing.scale(numeric_part),
                                  columns=numeric_part.columns)
df_n_ps_std[0].mean(axis=0)
Out[10]:
durationfiles          -4.261824e-16
bitratefiles            0.000000e+00
rmsfiles                4.303606e-16
rmsmedianfiles         -4.279731e-16
lowenergyfiles         -2.387576e-18
ASRfiles               -2.023471e-16
beatspectrumfiles       2.477111e-16
eventdensityfiles      -7.879002e-17
tempofiles             -3.133694e-17
pulseclarityfiles       3.103849e-17
zerocrossfiles         -2.930750e-16
rolloffsfiles           5.789873e-16
brightnessfiles        -8.356517e-17
spreadfiles            -3.842506e-16
centroidfiles          -2.142850e-16
kurtosisfiles          -6.327077e-17
flatnessfiles           1.366887e-16
entropyfiles            3.516900e-15
mfccfiles_1            -1.921999e-16
mfccfiles_2            -5.372047e-18
mfccfiles_3            -1.178120e-16
mfccfiles_4            -2.648718e-17
mfccfiles_5            -4.655774e-17
mfccfiles_6            -1.193788e-18
mfccfiles_7             6.002516e-17
mfccfiles_8             1.492235e-17
mfccfiles_9             5.133289e-17
mfccfiles_10            2.596489e-17
mfccfiles_11           -3.402296e-17
mfccfiles_12           -4.775153e-18
mfccfiles_13           -4.476706e-18
pitchfiles              0.000000e+00
inharmonicityfiles      2.595743e-15
bestkeyfiles           -8.475896e-17
keyclarityfiles         5.369062e-16
modalityfiles          -3.282918e-17
tonalcentroidfiles_1   -1.522080e-17
tonalcentroidfiles_2   -6.565835e-18
tonalcentroidfiles_3   -9.699529e-18
tonalcentroidfiles_4    1.671303e-17
tonalcentroidfiles_5   -2.193586e-17
tonalcentroidfiles_6    2.059285e-17
chromagramfiles_1      -1.811574e-16
chromagramfiles_2      -4.282715e-17
chromagramfiles_3       4.819920e-17
chromagramfiles_4      -2.188363e-16
chromagramfiles_5      -3.282918e-18
chromagramfiles_6      -8.834033e-17
chromagramfiles_7       3.730588e-17
chromagramfiles_8      -1.140068e-16
chromagramfiles_9      -2.715868e-17
chromagramfiles_10     -6.707597e-17
chromagramfiles_11     -6.051014e-17
chromagramfiles_12      2.148446e-16
attackslopefiles       -4.327482e-17
attackleapfiles        -1.551925e-16
chosen                 -3.068036e-16
dtype: float64
In [11]:
df_n_ps_std[0].std(axis=0)
Out[11]:
durationfiles           1.001347
bitratefiles            0.000000
rmsfiles                1.001347
rmsmedianfiles          1.001347
lowenergyfiles          1.001347
ASRfiles                1.001347
beatspectrumfiles       1.001347
eventdensityfiles       1.001347
tempofiles              1.001347
pulseclarityfiles       1.001347
zerocrossfiles          1.001347
rolloffsfiles           1.001347
brightnessfiles         1.001347
spreadfiles             1.001347
centroidfiles           1.001347
kurtosisfiles           1.001347
flatnessfiles           1.001347
entropyfiles            1.001347
mfccfiles_1             1.001347
mfccfiles_2             1.001347
mfccfiles_3             1.001347
mfccfiles_4             1.001347
mfccfiles_5             1.001347
mfccfiles_6             1.001347
mfccfiles_7             1.001347
mfccfiles_8             1.001347
mfccfiles_9             1.001347
mfccfiles_10            1.001347
mfccfiles_11            1.001347
mfccfiles_12            1.001347
mfccfiles_13            1.001347
pitchfiles              0.000000
inharmonicityfiles      1.001347
bestkeyfiles            1.001347
keyclarityfiles         1.001347
modalityfiles           1.001347
tonalcentroidfiles_1    1.001347
tonalcentroidfiles_2    1.001347
tonalcentroidfiles_3    1.001347
tonalcentroidfiles_4    1.001347
tonalcentroidfiles_5    1.001347
tonalcentroidfiles_6    1.001347
chromagramfiles_1       1.001347
chromagramfiles_2       1.001347
chromagramfiles_3       1.001347
chromagramfiles_4       1.001347
chromagramfiles_5       1.001347
chromagramfiles_6       1.001347
chromagramfiles_7       1.001347
chromagramfiles_8       1.001347
chromagramfiles_9       1.001347
chromagramfiles_10      1.001347
chromagramfiles_11      1.001347
chromagramfiles_12      1.001347
attackslopefiles        1.001347
attackleapfiles         1.001347
chosen                  1.001347
dtype: float64

We drop pitchfiles and bitratefiles because they are constant columns (standard deviation 0 after scaling), so they carry no information.

In [12]:
# pitchfiles and bitratefiles are constant columns (std == 0 after scaling),
# so they carry no information — drop both with a single call per frame.
for i in range(len(companies)):
    df_n_ps_std[i] = df_n_ps_std[i].drop(columns=['pitchfiles', 'bitratefiles'])
In [16]:
df_n_ps_std[0].columns
Out[16]:
Index(['durationfiles', 'rmsfiles', 'rmsmedianfiles', 'lowenergyfiles',
       'ASRfiles', 'beatspectrumfiles', 'eventdensityfiles', 'tempofiles',
       'pulseclarityfiles', 'zerocrossfiles', 'rolloffsfiles',
       'brightnessfiles', 'spreadfiles', 'centroidfiles', 'kurtosisfiles',
       'flatnessfiles', 'entropyfiles', 'mfccfiles_1', 'mfccfiles_2',
       'mfccfiles_3', 'mfccfiles_4', 'mfccfiles_5', 'mfccfiles_6',
       'mfccfiles_7', 'mfccfiles_8', 'mfccfiles_9', 'mfccfiles_10',
       'mfccfiles_11', 'mfccfiles_12', 'mfccfiles_13', 'inharmonicityfiles',
       'bestkeyfiles', 'keyclarityfiles', 'modalityfiles',
       'tonalcentroidfiles_1', 'tonalcentroidfiles_2', 'tonalcentroidfiles_3',
       'tonalcentroidfiles_4', 'tonalcentroidfiles_5', 'tonalcentroidfiles_6',
       'chromagramfiles_1', 'chromagramfiles_2', 'chromagramfiles_3',
       'chromagramfiles_4', 'chromagramfiles_5', 'chromagramfiles_6',
       'chromagramfiles_7', 'chromagramfiles_8', 'chromagramfiles_9',
       'chromagramfiles_10', 'chromagramfiles_11', 'chromagramfiles_12',
       'attackslopefiles', 'attackleapfiles', 'chosen'],
      dtype='object')

MFCC

In [175]:
df_n_ps_std[0].columns[17:30]
Out[175]:
Index(['mfccfiles_1', 'mfccfiles_2', 'mfccfiles_3', 'mfccfiles_4',
       'mfccfiles_5', 'mfccfiles_6', 'mfccfiles_7', 'mfccfiles_8',
       'mfccfiles_9', 'mfccfiles_10', 'mfccfiles_11', 'mfccfiles_12',
       'mfccfiles_13'],
      dtype='object')
In [176]:
# Keep only the 13 standardized MFCC columns (positions 17..29) per company.
# (iloc already preserves the column names, so no reassignment is needed.)
df_n_ps_std_mfcc = [None] * len(companies)
for i in range(len(companies)):
    df_n_ps_std_mfcc[i] = df_n_ps_std[i].iloc[:, 17:30].copy()
df_n_ps_std_mfcc[0].info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 372 entries, 0 to 371
Data columns (total 13 columns):
mfccfiles_1     372 non-null float64
mfccfiles_2     372 non-null float64
mfccfiles_3     372 non-null float64
mfccfiles_4     372 non-null float64
mfccfiles_5     372 non-null float64
mfccfiles_6     372 non-null float64
mfccfiles_7     372 non-null float64
mfccfiles_8     372 non-null float64
mfccfiles_9     372 non-null float64
mfccfiles_10    372 non-null float64
mfccfiles_11    372 non-null float64
mfccfiles_12    372 non-null float64
mfccfiles_13    372 non-null float64
dtypes: float64(13)
memory usage: 37.9 KB

Arte Francés

ANN

In [13]:
import keras
keras.__version__
Using TensorFlow backend.
Out[13]:
'2.3.0'
In [14]:
from keras.layers import Input, Flatten, Dense#, Lambda
from keras.models import Model
from keras import layers
from keras import models, optimizers

from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import GridSearchCV #permite buscar la mejor configuración de parámetros con C-V
from sklearn.metrics import make_scorer # permite crear una clase scorer a partir de una función de score (necesario para el kappa)
from sklearn.metrics import accuracy_score, cohen_kappa_score, classification_report, roc_auc_score
from sklearn.model_selection import train_test_split #metodo de particionamiento de datasets para evaluación
from sklearn.preprocessing import StandardScaler
In [177]:
X = df_n_ps_std_mfcc[0]
In [178]:
y = df_n_ps[0]['chosen']
In [179]:
# Fix: pin the split for reproducibility (same seed used elsewhere in the
# notebook) and stratify on the label so train/test keep the same
# positive/negative ratio.  The default 75/25 split size is unchanged.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234, stratify=y)
In [180]:
X_train.shape
Out[180]:
(279, 13)
In [24]:
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [25]:
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [26]:
import time
start = time.time() # current time in seconds since the Unix epoch (Jan 1, 1970) — reference point for timing the search

np.random.seed(1234)
# Hyper-parameter grid for the MLP search; batch_size was excluded to keep
# the search tractable (3 * 11 * 11 * 11 = 3993 candidates, x5 CV folds).
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Score with both Cohen's kappa and accuracy, but refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): `iid` was deprecated in sklearn 0.22 and removed in 0.24 —
# drop it when upgrading scikit-learn.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [27]:
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Tiempo después de finalizar el entrenamiento del modelo
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.006, 'max_iter': 300}, que permiten obtener un Accuracy de 82.08% y un Kappa del 43.49
Tiempo total: 29.12 minutos
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\neural_network\multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (300) reached and the optimization hasn't converged yet.
  % self.max_iter, ConvergenceWarning)
In [181]:
# Architecture hyper-parameters for the Keras re-implementation, taken from
# the best MLPClassifier found by the grid search: one hidden layer of 20
# units, learning rate 0.006, 300 epochs.
grid.best_params_['hidden_layer_sizes'] = [20]
n0 = X_train.shape[1]  # input width = number of MFCC features
### hidden_layer_sizes
ns = list(grid.best_params_['hidden_layer_sizes'])  # hidden layer widths
ns.append(1)  # single sigmoid output unit
lr = 0.006
epochs = 300
In [182]:
input_tensor = Input(shape = (n0,))
In [183]:
# Chain the hidden Dense layers: each new layer consumes the most recently
# produced tensor, starting from the input.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))

# Final sigmoid unit for the binary chosen/not-chosen classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
In [184]:
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [185]:
model.summary()
Model: "model_6"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_6 (InputLayer)         (None, 13)                0         
_________________________________________________________________
dense_17 (Dense)             (None, 20)                280       
_________________________________________________________________
dense_18 (Dense)             (None, 1)                 21        
=================================================================
Total params: 301
Trainable params: 301
Non-trainable params: 0
_________________________________________________________________
In [186]:
# Restore the freshly-initialized weights captured right after model creation
# so each run of this cell starts training from the same point.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy fails to improve by at
# least 0.01 for 10 consecutive epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test),
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 279 samples, validate on 93 samples
Epoch 1/300
279/279 [==============================] - 0s 641us/step - loss: 0.7161 - accuracy: 0.5556 - val_loss: 0.6348 - val_accuracy: 0.6667
Epoch 2/300
279/279 [==============================] - 0s 47us/step - loss: 0.5940 - accuracy: 0.7025 - val_loss: 0.6026 - val_accuracy: 0.6989
Epoch 3/300
279/279 [==============================] - 0s 47us/step - loss: 0.5317 - accuracy: 0.7599 - val_loss: 0.5880 - val_accuracy: 0.7097
Epoch 4/300
279/279 [==============================] - 0s 47us/step - loss: 0.4879 - accuracy: 0.7849 - val_loss: 0.5798 - val_accuracy: 0.7097
Epoch 5/300
279/279 [==============================] - 0s 50us/step - loss: 0.4620 - accuracy: 0.7957 - val_loss: 0.5636 - val_accuracy: 0.7312
Epoch 6/300
279/279 [==============================] - 0s 50us/step - loss: 0.4473 - accuracy: 0.7993 - val_loss: 0.5569 - val_accuracy: 0.7204
Epoch 7/300
279/279 [==============================] - 0s 79us/step - loss: 0.4387 - accuracy: 0.7993 - val_loss: 0.5600 - val_accuracy: 0.7312
Epoch 8/300
279/279 [==============================] - 0s 54us/step - loss: 0.4304 - accuracy: 0.8065 - val_loss: 0.5599 - val_accuracy: 0.7419
Epoch 9/300
279/279 [==============================] - 0s 47us/step - loss: 0.4272 - accuracy: 0.7993 - val_loss: 0.5650 - val_accuracy: 0.7204
Epoch 10/300
279/279 [==============================] - 0s 54us/step - loss: 0.4187 - accuracy: 0.8100 - val_loss: 0.5698 - val_accuracy: 0.7312
Epoch 11/300
279/279 [==============================] - 0s 54us/step - loss: 0.4159 - accuracy: 0.8172 - val_loss: 0.5727 - val_accuracy: 0.7312
Epoch 12/300
279/279 [==============================] - 0s 50us/step - loss: 0.4103 - accuracy: 0.8244 - val_loss: 0.5749 - val_accuracy: 0.7312
Epoch 13/300
279/279 [==============================] - 0s 54us/step - loss: 0.4058 - accuracy: 0.8315 - val_loss: 0.5743 - val_accuracy: 0.6989
Epoch 14/300
279/279 [==============================] - 0s 54us/step - loss: 0.4027 - accuracy: 0.8315 - val_loss: 0.5734 - val_accuracy: 0.7312
Epoch 15/300
279/279 [==============================] - 0s 72us/step - loss: 0.3979 - accuracy: 0.8315 - val_loss: 0.5791 - val_accuracy: 0.7527
Epoch 16/300
279/279 [==============================] - 0s 64us/step - loss: 0.3922 - accuracy: 0.8315 - val_loss: 0.5729 - val_accuracy: 0.7097
Epoch 17/300
279/279 [==============================] - 0s 57us/step - loss: 0.3870 - accuracy: 0.8423 - val_loss: 0.5687 - val_accuracy: 0.7204
Epoch 18/300
279/279 [==============================] - 0s 50us/step - loss: 0.3831 - accuracy: 0.8423 - val_loss: 0.5750 - val_accuracy: 0.7312
Epoch 19/300
279/279 [==============================] - 0s 57us/step - loss: 0.3779 - accuracy: 0.8387 - val_loss: 0.5681 - val_accuracy: 0.7204
Epoch 20/300
279/279 [==============================] - 0s 57us/step - loss: 0.3730 - accuracy: 0.8423 - val_loss: 0.5734 - val_accuracy: 0.7204
Epoch 21/300
279/279 [==============================] - 0s 57us/step - loss: 0.3681 - accuracy: 0.8423 - val_loss: 0.5783 - val_accuracy: 0.7097
Epoch 22/300
279/279 [==============================] - 0s 57us/step - loss: 0.3633 - accuracy: 0.8423 - val_loss: 0.5685 - val_accuracy: 0.7312
Epoch 23/300
279/279 [==============================] - 0s 57us/step - loss: 0.3576 - accuracy: 0.8459 - val_loss: 0.5778 - val_accuracy: 0.7312
Epoch 24/300
279/279 [==============================] - 0s 82us/step - loss: 0.3528 - accuracy: 0.8459 - val_loss: 0.5799 - val_accuracy: 0.7097
Epoch 25/300
279/279 [==============================] - 0s 64us/step - loss: 0.3481 - accuracy: 0.8495 - val_loss: 0.5794 - val_accuracy: 0.7312

Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.003000000026077032.
Epoch 26/300
279/279 [==============================] - 0s 64us/step - loss: 0.3408 - accuracy: 0.8530 - val_loss: 0.5796 - val_accuracy: 0.7204
Epoch 27/300
279/279 [==============================] - 0s 50us/step - loss: 0.3393 - accuracy: 0.8530 - val_loss: 0.5772 - val_accuracy: 0.7097
Epoch 28/300
279/279 [==============================] - 0s 50us/step - loss: 0.3361 - accuracy: 0.8530 - val_loss: 0.5776 - val_accuracy: 0.7097
Epoch 29/300
279/279 [==============================] - 0s 50us/step - loss: 0.3335 - accuracy: 0.8530 - val_loss: 0.5781 - val_accuracy: 0.7204
Epoch 30/300
279/279 [==============================] - 0s 54us/step - loss: 0.3306 - accuracy: 0.8566 - val_loss: 0.5785 - val_accuracy: 0.7097
Epoch 31/300
279/279 [==============================] - 0s 50us/step - loss: 0.3282 - accuracy: 0.8566 - val_loss: 0.5786 - val_accuracy: 0.7097
Epoch 32/300
279/279 [==============================] - 0s 50us/step - loss: 0.3252 - accuracy: 0.8602 - val_loss: 0.5801 - val_accuracy: 0.7097
Epoch 33/300
279/279 [==============================] - 0s 50us/step - loss: 0.3228 - accuracy: 0.8602 - val_loss: 0.5783 - val_accuracy: 0.7204
Epoch 34/300
279/279 [==============================] - 0s 50us/step - loss: 0.3201 - accuracy: 0.8602 - val_loss: 0.5823 - val_accuracy: 0.6989
Epoch 35/300
279/279 [==============================] - 0s 68us/step - loss: 0.3176 - accuracy: 0.8602 - val_loss: 0.5773 - val_accuracy: 0.7097

Epoch 00035: ReduceLROnPlateau reducing learning rate to 0.001500000013038516.
Epoch 36/300
279/279 [==============================] - 0s 57us/step - loss: 0.3139 - accuracy: 0.8674 - val_loss: 0.5800 - val_accuracy: 0.7097
Epoch 37/300
279/279 [==============================] - 0s 50us/step - loss: 0.3130 - accuracy: 0.8674 - val_loss: 0.5834 - val_accuracy: 0.6989
Epoch 38/300
279/279 [==============================] - 0s 54us/step - loss: 0.3114 - accuracy: 0.8638 - val_loss: 0.5815 - val_accuracy: 0.7097
Epoch 39/300
279/279 [==============================] - 0s 61us/step - loss: 0.3097 - accuracy: 0.8638 - val_loss: 0.5837 - val_accuracy: 0.6989
Epoch 40/300
279/279 [==============================] - 0s 50us/step - loss: 0.3086 - accuracy: 0.8638 - val_loss: 0.5840 - val_accuracy: 0.7097
Epoch 41/300
279/279 [==============================] - 0s 50us/step - loss: 0.3073 - accuracy: 0.8674 - val_loss: 0.5840 - val_accuracy: 0.6989
Epoch 42/300
279/279 [==============================] - 0s 54us/step - loss: 0.3053 - accuracy: 0.8674 - val_loss: 0.5843 - val_accuracy: 0.6989
Epoch 43/300
279/279 [==============================] - 0s 50us/step - loss: 0.3051 - accuracy: 0.8674 - val_loss: 0.5836 - val_accuracy: 0.7097
Epoch 44/300
279/279 [==============================] - 0s 54us/step - loss: 0.3031 - accuracy: 0.8674 - val_loss: 0.5828 - val_accuracy: 0.7097
Epoch 45/300
279/279 [==============================] - 0s 50us/step - loss: 0.3014 - accuracy: 0.8638 - val_loss: 0.5844 - val_accuracy: 0.7097

Epoch 00045: ReduceLROnPlateau reducing learning rate to 0.000750000006519258.
Epoch 46/300
279/279 [==============================] - 0s 54us/step - loss: 0.3004 - accuracy: 0.8638 - val_loss: 0.5851 - val_accuracy: 0.7097
Epoch 47/300
279/279 [==============================] - 0s 54us/step - loss: 0.2993 - accuracy: 0.8638 - val_loss: 0.5854 - val_accuracy: 0.7097
Epoch 48/300
279/279 [==============================] - 0s 50us/step - loss: 0.2988 - accuracy: 0.8638 - val_loss: 0.5866 - val_accuracy: 0.7097
Epoch 49/300
279/279 [==============================] - 0s 50us/step - loss: 0.2981 - accuracy: 0.8674 - val_loss: 0.5869 - val_accuracy: 0.7097
Epoch 50/300
279/279 [==============================] - 0s 50us/step - loss: 0.2973 - accuracy: 0.8674 - val_loss: 0.5866 - val_accuracy: 0.7097
Epoch 51/300
279/279 [==============================] - 0s 54us/step - loss: 0.2967 - accuracy: 0.8674 - val_loss: 0.5879 - val_accuracy: 0.7097
Epoch 52/300
279/279 [==============================] - 0s 50us/step - loss: 0.2961 - accuracy: 0.8674 - val_loss: 0.5873 - val_accuracy: 0.7097
Epoch 53/300
279/279 [==============================] - 0s 50us/step - loss: 0.2953 - accuracy: 0.8674 - val_loss: 0.5886 - val_accuracy: 0.7097
Epoch 54/300
279/279 [==============================] - 0s 50us/step - loss: 0.2947 - accuracy: 0.8710 - val_loss: 0.5880 - val_accuracy: 0.7097
Epoch 55/300
279/279 [==============================] - 0s 54us/step - loss: 0.2941 - accuracy: 0.8674 - val_loss: 0.5889 - val_accuracy: 0.7097

Epoch 00055: ReduceLROnPlateau reducing learning rate to 0.000375000003259629.
Epoch 56/300
279/279 [==============================] - 0s 54us/step - loss: 0.2933 - accuracy: 0.8710 - val_loss: 0.5887 - val_accuracy: 0.7097
Epoch 57/300
279/279 [==============================] - 0s 75us/step - loss: 0.2931 - accuracy: 0.8746 - val_loss: 0.5885 - val_accuracy: 0.7097
Epoch 58/300
279/279 [==============================] - 0s 64us/step - loss: 0.2927 - accuracy: 0.8746 - val_loss: 0.5887 - val_accuracy: 0.7097
Epoch 59/300
279/279 [==============================] - 0s 61us/step - loss: 0.2923 - accuracy: 0.8781 - val_loss: 0.5899 - val_accuracy: 0.7097
Epoch 60/300
279/279 [==============================] - 0s 54us/step - loss: 0.2919 - accuracy: 0.8781 - val_loss: 0.5899 - val_accuracy: 0.7097
Epoch 61/300
279/279 [==============================] - 0s 57us/step - loss: 0.2916 - accuracy: 0.8781 - val_loss: 0.5900 - val_accuracy: 0.7097
Epoch 62/300
279/279 [==============================] - 0s 57us/step - loss: 0.2913 - accuracy: 0.8781 - val_loss: 0.5906 - val_accuracy: 0.7097
Epoch 63/300
279/279 [==============================] - 0s 50us/step - loss: 0.2909 - accuracy: 0.8781 - val_loss: 0.5904 - val_accuracy: 0.7097
Epoch 64/300
279/279 [==============================] - 0s 54us/step - loss: 0.2907 - accuracy: 0.8781 - val_loss: 0.5902 - val_accuracy: 0.7097
Epoch 65/300
279/279 [==============================] - 0s 50us/step - loss: 0.2902 - accuracy: 0.8781 - val_loss: 0.5903 - val_accuracy: 0.7097

Epoch 00065: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145.
Epoch 66/300
279/279 [==============================] - 0s 61us/step - loss: 0.2898 - accuracy: 0.8781 - val_loss: 0.5902 - val_accuracy: 0.7097
Epoch 67/300
279/279 [==============================] - 0s 54us/step - loss: 0.2897 - accuracy: 0.8781 - val_loss: 0.5903 - val_accuracy: 0.7097
Epoch 68/300
279/279 [==============================] - 0s 50us/step - loss: 0.2895 - accuracy: 0.8781 - val_loss: 0.5903 - val_accuracy: 0.7097
Epoch 69/300
279/279 [==============================] - 0s 54us/step - loss: 0.2894 - accuracy: 0.8781 - val_loss: 0.5905 - val_accuracy: 0.7097
Epoch 70/300
279/279 [==============================] - 0s 54us/step - loss: 0.2892 - accuracy: 0.8781 - val_loss: 0.5905 - val_accuracy: 0.7097
Epoch 71/300
279/279 [==============================] - 0s 54us/step - loss: 0.2890 - accuracy: 0.8781 - val_loss: 0.5906 - val_accuracy: 0.7097
Epoch 72/300
279/279 [==============================] - 0s 75us/step - loss: 0.2889 - accuracy: 0.8781 - val_loss: 0.5906 - val_accuracy: 0.7097
Epoch 73/300
279/279 [==============================] - 0s 57us/step - loss: 0.2887 - accuracy: 0.8781 - val_loss: 0.5904 - val_accuracy: 0.7097
Epoch 74/300
279/279 [==============================] - 0s 54us/step - loss: 0.2885 - accuracy: 0.8781 - val_loss: 0.5906 - val_accuracy: 0.7097
Epoch 75/300
279/279 [==============================] - 0s 54us/step - loss: 0.2884 - accuracy: 0.8781 - val_loss: 0.5905 - val_accuracy: 0.7097

Epoch 00075: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05.
Epoch 76/300
279/279 [==============================] - 0s 50us/step - loss: 0.2882 - accuracy: 0.8781 - val_loss: 0.5907 - val_accuracy: 0.7097
Epoch 77/300
279/279 [==============================] - 0s 54us/step - loss: 0.2881 - accuracy: 0.8781 - val_loss: 0.5908 - val_accuracy: 0.7097
Epoch 78/300
279/279 [==============================] - 0s 54us/step - loss: 0.2880 - accuracy: 0.8781 - val_loss: 0.5908 - val_accuracy: 0.7097
Epoch 79/300
279/279 [==============================] - 0s 54us/step - loss: 0.2879 - accuracy: 0.8817 - val_loss: 0.5909 - val_accuracy: 0.7097
Epoch 80/300
279/279 [==============================] - 0s 57us/step - loss: 0.2879 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097
Epoch 81/300
279/279 [==============================] - 0s 79us/step - loss: 0.2878 - accuracy: 0.8817 - val_loss: 0.5909 - val_accuracy: 0.7097
Epoch 82/300
279/279 [==============================] - 0s 68us/step - loss: 0.2877 - accuracy: 0.8817 - val_loss: 0.5909 - val_accuracy: 0.7097
Epoch 83/300
279/279 [==============================] - 0s 54us/step - loss: 0.2876 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097
Epoch 84/300
279/279 [==============================] - 0s 57us/step - loss: 0.2875 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097
Epoch 85/300
279/279 [==============================] - 0s 72us/step - loss: 0.2874 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097

Epoch 00085: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05.
Epoch 86/300
279/279 [==============================] - 0s 64us/step - loss: 0.2873 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097
Epoch 87/300
279/279 [==============================] - 0s 57us/step - loss: 0.2873 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097
Epoch 88/300
279/279 [==============================] - 0s 54us/step - loss: 0.2872 - accuracy: 0.8817 - val_loss: 0.5911 - val_accuracy: 0.7097
Epoch 89/300
279/279 [==============================] - 0s 50us/step - loss: 0.2872 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097
Epoch 90/300
279/279 [==============================] - 0s 54us/step - loss: 0.2872 - accuracy: 0.8817 - val_loss: 0.5910 - val_accuracy: 0.7097
Epoch 91/300
279/279 [==============================] - 0s 50us/step - loss: 0.2871 - accuracy: 0.8817 - val_loss: 0.5911 - val_accuracy: 0.7097
Epoch 92/300
279/279 [==============================] - 0s 50us/step - loss: 0.2871 - accuracy: 0.8817 - val_loss: 0.5911 - val_accuracy: 0.7097
Epoch 93/300
279/279 [==============================] - 0s 54us/step - loss: 0.2870 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 94/300
279/279 [==============================] - 0s 50us/step - loss: 0.2870 - accuracy: 0.8817 - val_loss: 0.5911 - val_accuracy: 0.7097
Epoch 95/300
279/279 [==============================] - 0s 54us/step - loss: 0.2869 - accuracy: 0.8817 - val_loss: 0.5911 - val_accuracy: 0.7097

Epoch 00095: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05.
Epoch 96/300
279/279 [==============================] - 0s 50us/step - loss: 0.2869 - accuracy: 0.8817 - val_loss: 0.5911 - val_accuracy: 0.7097
Epoch 97/300
279/279 [==============================] - 0s 54us/step - loss: 0.2869 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 98/300
279/279 [==============================] - 0s 54us/step - loss: 0.2868 - accuracy: 0.8817 - val_loss: 0.5911 - val_accuracy: 0.7097
Epoch 99/300
279/279 [==============================] - 0s 50us/step - loss: 0.2868 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 100/300
279/279 [==============================] - 0s 47us/step - loss: 0.2868 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 101/300
279/279 [==============================] - 0s 54us/step - loss: 0.2868 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 102/300
279/279 [==============================] - 0s 54us/step - loss: 0.2868 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 103/300
279/279 [==============================] - 0s 54us/step - loss: 0.2867 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 104/300
279/279 [==============================] - 0s 54us/step - loss: 0.2867 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 105/300
279/279 [==============================] - 0s 50us/step - loss: 0.2867 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097

Epoch 00105: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05.
Epoch 106/300
279/279 [==============================] - 0s 50us/step - loss: 0.2867 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 107/300
279/279 [==============================] - 0s 47us/step - loss: 0.2867 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 108/300
279/279 [==============================] - 0s 47us/step - loss: 0.2867 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 109/300
279/279 [==============================] - 0s 54us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 110/300
279/279 [==============================] - 0s 50us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 111/300
279/279 [==============================] - 0s 50us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 112/300
279/279 [==============================] - 0s 75us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 113/300
279/279 [==============================] - 0s 57us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 114/300
279/279 [==============================] - 0s 57us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097
Epoch 115/300
279/279 [==============================] - 0s 50us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5912 - val_accuracy: 0.7097

Epoch 00115: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06.
Epoch 116/300
279/279 [==============================] - 0s 57us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 117/300
279/279 [==============================] - 0s 50us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 118/300
279/279 [==============================] - 0s 54us/step - loss: 0.2866 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 119/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 120/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 121/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 122/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 123/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 124/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 125/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00125: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06.
Epoch 126/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 127/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 128/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 129/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 130/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 131/300
279/279 [==============================] - ETA: 0s - loss: 0.1649 - accuracy: 0.96 - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 132/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 133/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 134/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 135/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00135: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06.
Epoch 136/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 137/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 138/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 139/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 140/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 141/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 142/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 143/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 144/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 145/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00145: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07.
Epoch 146/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 147/300
279/279 [==============================] - 0s 79us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 148/300
279/279 [==============================] - 0s 68us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 149/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 150/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 151/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 152/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 153/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 154/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 155/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00155: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07.
Epoch 156/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 157/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 158/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 159/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 160/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 161/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 162/300
279/279 [==============================] - 0s 82us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 163/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 164/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 165/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00165: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07.
Epoch 166/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 167/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 168/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 169/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 170/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 171/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 172/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 173/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 174/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 175/300
279/279 [==============================] - 0s 79us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00175: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08.
Epoch 176/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 177/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 178/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 179/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 180/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 181/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 182/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 183/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 184/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 185/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00185: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08.
Epoch 186/300
279/279 [==============================] - 0s 75us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 187/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 188/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 189/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 190/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 191/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 192/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 193/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 194/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 195/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00195: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08.
Epoch 196/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 197/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 198/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 199/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 200/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 201/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 202/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 203/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 204/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 205/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00205: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08.
Epoch 206/300
279/279 [==============================] - 0s 68us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 207/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 208/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 209/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 210/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 211/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 212/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 213/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 214/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 215/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00215: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09.
Epoch 216/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 217/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 218/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 219/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 220/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 221/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 222/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 223/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 224/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 225/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00225: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09.
Epoch 226/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 227/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 228/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 229/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 230/300
279/279 [==============================] - 0s 75us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 231/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 232/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 233/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 234/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 235/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00235: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09.
Epoch 236/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 237/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 238/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 239/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 240/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 241/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 242/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 243/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 244/300
279/279 [==============================] - 0s 86us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 245/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00245: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10.
Epoch 246/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 247/300
279/279 [==============================] - 0s 68us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 248/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 249/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 250/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 251/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 252/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 253/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 254/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 255/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00255: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10.
Epoch 256/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 257/300
279/279 [==============================] - 0s 47us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 258/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 259/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 260/300
279/279 [==============================] - 0s 90us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 261/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 262/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 263/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 264/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 265/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00265: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10.
Epoch 266/300
279/279 [==============================] - ETA: 0s - loss: 0.3475 - accuracy: 0.81 - 0s 75us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 267/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 268/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 269/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 270/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 271/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 272/300
279/279 [==============================] - 0s 47us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 273/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 274/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 275/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00275: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11.
Epoch 276/300
279/279 [==============================] - 0s 47us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 277/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 278/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 279/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 280/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 281/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 282/300
279/279 [==============================] - 0s 54us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 283/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 284/300
279/279 [==============================] - 0s 50us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 285/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00285: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11.
Epoch 286/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 287/300
279/279 [==============================] - 0s 68us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 288/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 289/300
279/279 [==============================] - 0s 86us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 290/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 291/300
279/279 [==============================] - 0s 68us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 292/300
279/279 [==============================] - 0s 68us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 293/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 294/300
279/279 [==============================] - 0s 57us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 295/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097

Epoch 00295: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11.
Epoch 296/300
279/279 [==============================] - 0s 64us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 297/300
279/279 [==============================] - 0s 75us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 298/300
279/279 [==============================] - 0s 72us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 299/300
279/279 [==============================] - 0s 61us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
Epoch 300/300
279/279 [==============================] - 0s 82us/step - loss: 0.2865 - accuracy: 0.8817 - val_loss: 0.5913 - val_accuracy: 0.7097
In [187]:
# Plot training curves from the Keras History object returned by model.fit.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))

# Accuracy: training (blue dots) vs. validation (blue line).
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

# Loss: training (blue dots) vs. validation (blue line).
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 300)
In [188]:
# Score the trained network on the held-out test split.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
93/93 [==============================] - 0s 43us/step
test loss: 0.5912757192247657, test accuracy: 0.7096773982048035
In [190]:
# Predicted probabilities and agreement metrics on the test split.
y_pred = model.predict(X_test)
# Binarize the sigmoid outputs at 0.5 in one vectorized step instead of a
# Python-level map over rows (same resulting list of 0/1 ints).
# NOTE(review): assumes model.predict returns a numpy array — standard Keras.
y_pred_d = (y_pred >= 0.5).astype(int).ravel().tolist()
print("Kappa: ", cohen_kappa_score(y_test, y_pred_d))
# AUC uses the raw probabilities, not the thresholded labels.
print("AUC ROC: ", roc_auc_score(y_test, y_pred))
Kappa:  0.17860647693817455
AUC ROC:  0.6812570145903478

KMeans

In [191]:
X
Out[191]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13
0 0.303210 1.253016 -0.394054 0.592598 0.106623 -2.083256 0.858313 0.801936 2.380580 -1.304258 0.556361 1.949530 -1.046692
1 0.647559 -1.553511 -1.648243 -1.010792 -0.857927 0.335856 0.884468 0.250703 0.298648 0.683922 1.599907 0.349480 -0.484103
2 2.266625 -0.333664 0.685765 -2.001143 -0.820018 -2.442242 -1.583451 -2.793213 -2.158376 -2.431307 -0.855856 -0.471626 -1.478884
3 0.672266 -0.397422 0.105106 -1.822060 -1.335294 -1.384110 -0.608579 -1.639581 -2.081721 -1.171877 -1.102493 -1.264328 -1.165166
4 0.747622 0.110331 -0.079109 -1.108698 -0.391749 -0.448919 0.133859 -0.843237 -1.924086 -0.222835 0.221819 0.017631 -0.725177
5 2.072441 -1.565884 -0.268750 -1.648648 -3.149219 -3.406801 -4.332461 -1.709990 -1.313696 -1.503431 -2.036749 -1.928213 -2.657430
6 -0.163654 0.470736 0.440011 0.594090 -1.227236 0.409218 0.608496 -0.948833 -2.051031 0.892327 0.371683 -0.318984 0.022251
7 0.477732 -0.217651 -0.908178 -1.257961 -1.360625 -0.551388 -0.599896 0.099066 1.299780 2.443060 0.417236 -0.919898 -0.916391
8 0.897786 -3.040013 0.311694 -0.386220 -0.321124 -0.221380 -0.207002 -0.127210 0.011073 0.211925 -0.400748 -0.296623 -0.143419
9 -0.977087 1.088438 -0.184899 -0.626934 0.577247 0.522552 1.150101 1.023214 0.136257 0.193237 -0.496760 0.367549 0.378679
10 0.328615 -1.292300 -1.398337 -0.677268 -1.070980 -0.075073 -0.740061 -0.424240 -0.216693 0.633892 -0.070397 0.960392 0.403827
11 -0.199470 0.110219 0.238637 0.455154 -0.116209 0.374450 0.078145 0.424005 0.633052 -0.153498 -0.647002 0.301135 -0.000406
12 -0.711256 0.124802 0.734425 -0.445078 -0.503247 -0.323539 0.236246 -0.572803 -0.221112 -0.206486 -0.180516 0.119335 0.027470
13 -0.806898 -0.126740 -0.383726 0.035489 -1.164460 -0.574335 -0.633858 -0.009812 -0.131411 0.549197 -0.257952 0.307916 0.814674
14 -0.077242 -0.331495 0.550493 -0.008575 -0.215759 -1.260552 -0.581296 0.369790 -0.684267 0.792489 -0.457321 -0.704205 -0.093986
15 0.244538 0.777957 0.464181 0.169574 -0.433604 -1.172185 -1.866928 0.759778 -0.372608 0.009766 0.964104 1.082661 -0.506505
16 0.602329 -0.035069 0.178352 -0.036690 0.180302 -0.769568 0.364535 0.996915 0.263984 -0.829872 -0.133422 0.601135 -1.217336
17 -0.570258 -0.759570 0.108993 0.657477 0.342355 -0.903388 0.112467 -0.669060 -0.661619 0.915675 1.620722 -0.160697 0.379275
18 -0.288268 -1.202534 -0.544058 0.295908 0.568680 -1.416228 0.423676 0.041836 -0.665694 0.699155 -0.070704 -0.429451 1.194384
19 -0.497305 -0.552590 0.332470 0.660607 0.293725 -0.945647 -1.269354 0.464095 1.166255 2.034233 2.037855 0.555927 0.423683
20 1.386141 -0.516432 -0.074640 0.751101 -1.151864 0.155819 -1.921431 -3.381158 -1.145758 -1.197084 0.654749 1.636425 0.993236
21 0.076772 0.072900 0.122544 0.799017 -1.121011 -0.137599 -1.150187 -1.669293 -1.110882 -0.047217 -0.034112 -0.659214 1.160642
22 0.670757 -0.167252 -0.352765 0.189499 -1.232602 -0.168579 -1.559900 -1.850665 -1.416478 0.031846 0.308193 -0.956133 0.507231
23 0.639283 0.699164 0.621380 -0.725771 -0.890352 0.643955 -1.097228 0.229756 -0.091793 -2.390193 -0.825768 -2.164531 -0.772983
24 -0.907399 2.155157 0.873522 1.655111 0.871099 1.083262 -0.186962 0.373227 -0.354082 0.573586 0.733097 -0.986481 0.727511
25 -1.152272 1.601470 0.221927 1.296592 0.572807 0.581774 -0.479257 0.209504 -0.636178 0.574450 0.615706 -0.877894 0.941827
26 -0.676596 0.405600 0.553370 0.691531 -0.292469 0.626694 0.080413 0.246868 -0.100975 0.606694 -0.024154 -1.553730 -0.210884
27 -0.704834 -0.058170 0.609171 -0.735340 -0.512747 0.796018 -0.405976 0.502120 0.717380 -1.625431 0.825742 -1.663942 -0.379395
28 0.273095 1.014503 -0.772750 -0.065028 -0.513485 0.235377 -0.266144 1.373964 0.711880 -1.261758 1.106463 0.515863 0.555866
29 1.216372 0.637021 0.649194 0.099873 -0.816614 0.555439 -1.272918 -0.035862 0.154194 -1.797465 -0.177830 -1.702118 -1.136716
... ... ... ... ... ... ... ... ... ... ... ... ... ...
342 0.056741 0.169776 0.434163 -0.208821 0.498957 0.349198 0.547068 0.250228 -0.743894 -0.497075 0.373497 -0.025547 0.260645
343 -0.845812 -0.163165 0.268174 1.312135 1.241686 1.484484 1.279782 -0.350179 -0.266719 -0.170434 0.147000 -0.259175 -0.323251
344 0.056854 0.089458 -0.128149 0.123107 -0.879175 0.172486 0.919301 0.727007 -0.032509 -0.594358 -0.241536 -0.339538 -1.563800
345 1.322735 -0.970372 -1.058427 1.018282 -1.228871 0.835533 1.462831 -1.481872 -2.024441 0.388890 2.395768 -0.993539 0.301816
346 -0.159679 -0.200313 -0.181878 0.221536 -0.604018 0.554979 0.173592 -1.137738 -1.525377 -0.382164 1.156959 0.545188 -0.873936
347 -0.510690 -0.141874 -0.170690 -0.486309 -1.066447 -1.098392 -1.513393 -0.202811 0.062343 0.446348 -0.029988 -0.024432 -0.978036
348 0.441393 0.403987 0.538948 1.253198 -0.158511 0.497768 0.151471 -0.006025 0.213458 0.119760 -0.002312 0.139434 -0.401118
349 0.548477 0.987769 0.505748 0.779668 0.504327 -0.003400 0.200264 0.287803 0.084852 -0.044437 0.769553 0.169816 -0.581506
350 0.278851 -0.150632 1.015313 0.158731 -1.435466 -0.910636 1.526971 0.810376 -0.088268 2.273901 1.895682 -0.573207 1.173543
351 1.781784 -0.680962 -0.140043 1.730156 0.760657 1.081874 0.686370 -0.456141 -0.310319 0.443108 0.067726 -0.804283 0.268616
352 1.110023 -0.419764 -0.451242 1.471440 0.860531 0.858025 1.016472 0.013533 -0.532955 0.597255 -0.385255 -1.299309 0.869963
353 0.463780 0.094111 0.074193 0.457058 -0.494585 -0.741218 -1.615368 -0.323890 0.179301 -0.914854 -0.881275 -0.284568 0.516848
354 0.162857 1.300630 -0.374191 -0.148478 -0.275205 0.936621 -0.301931 0.926288 -0.242039 -1.217862 -0.849053 0.381655 1.521222
355 -0.261040 1.897992 0.324175 0.250461 -0.326921 0.078347 -0.794723 1.245895 0.561437 0.299601 0.612062 0.375109 0.668225
356 -2.412627 -0.912657 0.924859 1.091412 -0.430459 0.991776 0.577087 0.366311 0.916132 -0.010096 -0.337066 0.723121 0.634413
357 -1.610420 -0.171488 1.308910 1.557149 -0.783120 1.055891 0.070922 0.736289 0.651236 -0.209692 -0.293388 0.549580 0.947465
358 -1.627642 -0.225022 1.420291 1.585386 -0.623077 1.204209 0.203574 0.815228 0.701131 -0.111706 -0.352897 0.552444 1.038487
359 1.658650 0.261694 0.694273 -0.634006 -0.742717 -1.107684 -0.040641 0.685375 0.704374 0.457634 -0.012812 -0.227444 -0.311482
360 -0.472450 1.290735 1.251486 0.902820 1.064267 0.319911 0.273062 -0.004026 -0.730129 -0.487802 -0.590033 0.917054 0.316796
361 0.145973 1.078298 -0.110458 0.396705 0.465683 0.120005 0.324478 0.647014 0.406366 0.303529 0.342183 0.418467 -0.257006
362 1.354053 0.408020 -1.449365 -0.144038 0.735070 1.458916 -0.253049 0.476118 1.309448 1.981607 0.319930 -0.734588 -2.427842
363 3.546326 -0.337767 -0.983896 -3.155084 1.922015 3.128359 1.576092 2.767242 2.734920 1.749030 -1.432287 -5.486282 -3.776088
364 3.564797 -0.492960 -0.663172 -2.465245 2.044991 3.045697 1.746383 2.238430 2.806354 2.318786 -0.732814 -5.203217 -4.762769
365 -0.480041 0.390140 0.283493 0.710367 0.436247 0.787936 0.149057 1.081200 1.130496 0.783116 1.174331 0.987069 1.027523
366 1.601344 1.120977 0.942690 0.218542 1.432015 1.975393 1.352637 0.851851 0.707687 1.420656 -1.301018 0.996552 2.286308
367 -1.388425 0.554214 2.322455 0.125526 0.168411 1.459935 -0.011567 0.377516 1.801634 1.061665 1.432895 1.553952 0.335629
368 -0.560849 0.191976 -1.558597 -1.925355 0.006144 1.582531 2.334107 1.472221 0.788027 -0.066399 -0.539592 -0.089987 1.081026
369 -0.015724 0.095384 -0.050287 0.330265 -0.857518 -1.110181 0.457976 1.235890 0.937447 1.294180 1.196429 1.964913 0.788473
370 -0.320091 0.789370 -0.347116 -0.257819 -0.264866 0.119392 0.174557 -0.102622 -0.147261 0.927949 1.494185 0.617596 -0.119013
371 -1.397911 0.969347 -0.218602 0.165675 -1.187201 -1.022111 0.090487 -0.281353 -1.520146 -0.950575 -1.733689 -0.924046 -0.731033

372 rows × 13 columns

In [60]:
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[60]:
[4836.0,
 4367.616182778379,
 3974.0733975762073,
 3720.4477504020774,
 3549.9823424876267,
 3386.093163495573,
 3254.3815559758523,
 3136.90996141146,
 3048.6934734702136,
 2957.186911982338,
 2855.333306370868,
 2802.0220333671496,
 2712.5965714921504,
 2657.189981994876]
In [89]:
# Elbow plot of the inertia values computed above.
fig, ax = plt.subplots(figsize=(12, 12))
ax.plot(range(1, 15), WSSs)
Out[89]:
[<matplotlib.lines.Line2D at 0x244598a8630>]

K=3

In [81]:
# Fit k-means with the elbow-selected k = 3 (fit returns the estimator,
# so the cell still displays the fitted KMeans repr).
kmeans_mfcc = KMeans(n_clusters=3, random_state=0, n_init=10).fit(X)
kmeans_mfcc
Out[81]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [82]:
kmeans_mfcc.labels_
Out[82]:
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
       0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
       1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
       0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
       1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
       0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
       1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
       1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
       1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
       0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
       1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
       1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
In [83]:
# Assign every row of X to its nearest learned centroid.
# NOTE(review): X is the same data the model was fit on, so this should
# reproduce kmeans_mfcc.labels_ (the displayed arrays are identical) —
# the predict call is redundant here.
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
Out[83]:
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
       0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
       1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
       0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
       1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
       0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
       1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
       1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
       1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
       0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
       1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
       1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
In [84]:
# Attach the cluster id and the target label to the feature matrix for
# inspection. NOTE(review): this mutates X in place — after this cell X
# is no longer a pure feature matrix, which would silently affect any
# later cell that re-fits a model on X.
X.loc[:,'Cluster'] = clusters_mfcc
X.loc[:,'chosen'] = list(y)
In [85]:
X
Out[85]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13 Cluster chosen
0 0.303210 1.253016 -0.394054 0.592598 0.106623 -2.083256 0.858313 0.801936 2.380580 -1.304258 0.556361 1.949530 -1.046692 1 0
1 0.647559 -1.553511 -1.648243 -1.010792 -0.857927 0.335856 0.884468 0.250703 0.298648 0.683922 1.599907 0.349480 -0.484103 1 0
2 2.266625 -0.333664 0.685765 -2.001143 -0.820018 -2.442242 -1.583451 -2.793213 -2.158376 -2.431307 -0.855856 -0.471626 -1.478884 0 0
3 0.672266 -0.397422 0.105106 -1.822060 -1.335294 -1.384110 -0.608579 -1.639581 -2.081721 -1.171877 -1.102493 -1.264328 -1.165166 0 0
4 0.747622 0.110331 -0.079109 -1.108698 -0.391749 -0.448919 0.133859 -0.843237 -1.924086 -0.222835 0.221819 0.017631 -0.725177 0 0
5 2.072441 -1.565884 -0.268750 -1.648648 -3.149219 -3.406801 -4.332461 -1.709990 -1.313696 -1.503431 -2.036749 -1.928213 -2.657430 0 0
6 -0.163654 0.470736 0.440011 0.594090 -1.227236 0.409218 0.608496 -0.948833 -2.051031 0.892327 0.371683 -0.318984 0.022251 0 0
7 0.477732 -0.217651 -0.908178 -1.257961 -1.360625 -0.551388 -0.599896 0.099066 1.299780 2.443060 0.417236 -0.919898 -0.916391 1 0
8 0.897786 -3.040013 0.311694 -0.386220 -0.321124 -0.221380 -0.207002 -0.127210 0.011073 0.211925 -0.400748 -0.296623 -0.143419 0 0
9 -0.977087 1.088438 -0.184899 -0.626934 0.577247 0.522552 1.150101 1.023214 0.136257 0.193237 -0.496760 0.367549 0.378679 1 0
10 0.328615 -1.292300 -1.398337 -0.677268 -1.070980 -0.075073 -0.740061 -0.424240 -0.216693 0.633892 -0.070397 0.960392 0.403827 0 0
11 -0.199470 0.110219 0.238637 0.455154 -0.116209 0.374450 0.078145 0.424005 0.633052 -0.153498 -0.647002 0.301135 -0.000406 1 0
12 -0.711256 0.124802 0.734425 -0.445078 -0.503247 -0.323539 0.236246 -0.572803 -0.221112 -0.206486 -0.180516 0.119335 0.027470 0 0
13 -0.806898 -0.126740 -0.383726 0.035489 -1.164460 -0.574335 -0.633858 -0.009812 -0.131411 0.549197 -0.257952 0.307916 0.814674 0 0
14 -0.077242 -0.331495 0.550493 -0.008575 -0.215759 -1.260552 -0.581296 0.369790 -0.684267 0.792489 -0.457321 -0.704205 -0.093986 0 0
15 0.244538 0.777957 0.464181 0.169574 -0.433604 -1.172185 -1.866928 0.759778 -0.372608 0.009766 0.964104 1.082661 -0.506505 0 0
16 0.602329 -0.035069 0.178352 -0.036690 0.180302 -0.769568 0.364535 0.996915 0.263984 -0.829872 -0.133422 0.601135 -1.217336 1 0
17 -0.570258 -0.759570 0.108993 0.657477 0.342355 -0.903388 0.112467 -0.669060 -0.661619 0.915675 1.620722 -0.160697 0.379275 0 0
18 -0.288268 -1.202534 -0.544058 0.295908 0.568680 -1.416228 0.423676 0.041836 -0.665694 0.699155 -0.070704 -0.429451 1.194384 0 0
19 -0.497305 -0.552590 0.332470 0.660607 0.293725 -0.945647 -1.269354 0.464095 1.166255 2.034233 2.037855 0.555927 0.423683 1 0
20 1.386141 -0.516432 -0.074640 0.751101 -1.151864 0.155819 -1.921431 -3.381158 -1.145758 -1.197084 0.654749 1.636425 0.993236 0 0
21 0.076772 0.072900 0.122544 0.799017 -1.121011 -0.137599 -1.150187 -1.669293 -1.110882 -0.047217 -0.034112 -0.659214 1.160642 0 0
22 0.670757 -0.167252 -0.352765 0.189499 -1.232602 -0.168579 -1.559900 -1.850665 -1.416478 0.031846 0.308193 -0.956133 0.507231 0 0
23 0.639283 0.699164 0.621380 -0.725771 -0.890352 0.643955 -1.097228 0.229756 -0.091793 -2.390193 -0.825768 -2.164531 -0.772983 1 0
24 -0.907399 2.155157 0.873522 1.655111 0.871099 1.083262 -0.186962 0.373227 -0.354082 0.573586 0.733097 -0.986481 0.727511 1 0
25 -1.152272 1.601470 0.221927 1.296592 0.572807 0.581774 -0.479257 0.209504 -0.636178 0.574450 0.615706 -0.877894 0.941827 1 0
26 -0.676596 0.405600 0.553370 0.691531 -0.292469 0.626694 0.080413 0.246868 -0.100975 0.606694 -0.024154 -1.553730 -0.210884 1 0
27 -0.704834 -0.058170 0.609171 -0.735340 -0.512747 0.796018 -0.405976 0.502120 0.717380 -1.625431 0.825742 -1.663942 -0.379395 1 0
28 0.273095 1.014503 -0.772750 -0.065028 -0.513485 0.235377 -0.266144 1.373964 0.711880 -1.261758 1.106463 0.515863 0.555866 1 0
29 1.216372 0.637021 0.649194 0.099873 -0.816614 0.555439 -1.272918 -0.035862 0.154194 -1.797465 -0.177830 -1.702118 -1.136716 1 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
342 0.056741 0.169776 0.434163 -0.208821 0.498957 0.349198 0.547068 0.250228 -0.743894 -0.497075 0.373497 -0.025547 0.260645 1 1
343 -0.845812 -0.163165 0.268174 1.312135 1.241686 1.484484 1.279782 -0.350179 -0.266719 -0.170434 0.147000 -0.259175 -0.323251 1 1
344 0.056854 0.089458 -0.128149 0.123107 -0.879175 0.172486 0.919301 0.727007 -0.032509 -0.594358 -0.241536 -0.339538 -1.563800 1 1
345 1.322735 -0.970372 -1.058427 1.018282 -1.228871 0.835533 1.462831 -1.481872 -2.024441 0.388890 2.395768 -0.993539 0.301816 0 1
346 -0.159679 -0.200313 -0.181878 0.221536 -0.604018 0.554979 0.173592 -1.137738 -1.525377 -0.382164 1.156959 0.545188 -0.873936 0 1
347 -0.510690 -0.141874 -0.170690 -0.486309 -1.066447 -1.098392 -1.513393 -0.202811 0.062343 0.446348 -0.029988 -0.024432 -0.978036 0 1
348 0.441393 0.403987 0.538948 1.253198 -0.158511 0.497768 0.151471 -0.006025 0.213458 0.119760 -0.002312 0.139434 -0.401118 1 1
349 0.548477 0.987769 0.505748 0.779668 0.504327 -0.003400 0.200264 0.287803 0.084852 -0.044437 0.769553 0.169816 -0.581506 1 1
350 0.278851 -0.150632 1.015313 0.158731 -1.435466 -0.910636 1.526971 0.810376 -0.088268 2.273901 1.895682 -0.573207 1.173543 1 1
351 1.781784 -0.680962 -0.140043 1.730156 0.760657 1.081874 0.686370 -0.456141 -0.310319 0.443108 0.067726 -0.804283 0.268616 1 1
352 1.110023 -0.419764 -0.451242 1.471440 0.860531 0.858025 1.016472 0.013533 -0.532955 0.597255 -0.385255 -1.299309 0.869963 1 1
353 0.463780 0.094111 0.074193 0.457058 -0.494585 -0.741218 -1.615368 -0.323890 0.179301 -0.914854 -0.881275 -0.284568 0.516848 0 1
354 0.162857 1.300630 -0.374191 -0.148478 -0.275205 0.936621 -0.301931 0.926288 -0.242039 -1.217862 -0.849053 0.381655 1.521222 1 1
355 -0.261040 1.897992 0.324175 0.250461 -0.326921 0.078347 -0.794723 1.245895 0.561437 0.299601 0.612062 0.375109 0.668225 1 1
356 -2.412627 -0.912657 0.924859 1.091412 -0.430459 0.991776 0.577087 0.366311 0.916132 -0.010096 -0.337066 0.723121 0.634413 1 1
357 -1.610420 -0.171488 1.308910 1.557149 -0.783120 1.055891 0.070922 0.736289 0.651236 -0.209692 -0.293388 0.549580 0.947465 1 1
358 -1.627642 -0.225022 1.420291 1.585386 -0.623077 1.204209 0.203574 0.815228 0.701131 -0.111706 -0.352897 0.552444 1.038487 1 1
359 1.658650 0.261694 0.694273 -0.634006 -0.742717 -1.107684 -0.040641 0.685375 0.704374 0.457634 -0.012812 -0.227444 -0.311482 1 1
360 -0.472450 1.290735 1.251486 0.902820 1.064267 0.319911 0.273062 -0.004026 -0.730129 -0.487802 -0.590033 0.917054 0.316796 1 1
361 0.145973 1.078298 -0.110458 0.396705 0.465683 0.120005 0.324478 0.647014 0.406366 0.303529 0.342183 0.418467 -0.257006 1 1
362 1.354053 0.408020 -1.449365 -0.144038 0.735070 1.458916 -0.253049 0.476118 1.309448 1.981607 0.319930 -0.734588 -2.427842 1 1
363 3.546326 -0.337767 -0.983896 -3.155084 1.922015 3.128359 1.576092 2.767242 2.734920 1.749030 -1.432287 -5.486282 -3.776088 2 1
364 3.564797 -0.492960 -0.663172 -2.465245 2.044991 3.045697 1.746383 2.238430 2.806354 2.318786 -0.732814 -5.203217 -4.762769 2 1
365 -0.480041 0.390140 0.283493 0.710367 0.436247 0.787936 0.149057 1.081200 1.130496 0.783116 1.174331 0.987069 1.027523 1 1
366 1.601344 1.120977 0.942690 0.218542 1.432015 1.975393 1.352637 0.851851 0.707687 1.420656 -1.301018 0.996552 2.286308 1 1
367 -1.388425 0.554214 2.322455 0.125526 0.168411 1.459935 -0.011567 0.377516 1.801634 1.061665 1.432895 1.553952 0.335629 1 1
368 -0.560849 0.191976 -1.558597 -1.925355 0.006144 1.582531 2.334107 1.472221 0.788027 -0.066399 -0.539592 -0.089987 1.081026 1 1
369 -0.015724 0.095384 -0.050287 0.330265 -0.857518 -1.110181 0.457976 1.235890 0.937447 1.294180 1.196429 1.964913 0.788473 1 1
370 -0.320091 0.789370 -0.347116 -0.257819 -0.264866 0.119392 0.174557 -0.102622 -0.147261 0.927949 1.494185 0.617596 -0.119013 1 1
371 -1.397911 0.969347 -0.218602 0.165675 -1.187201 -1.022111 0.090487 -0.281353 -1.520146 -0.950575 -1.733689 -0.924046 -0.731033 0 1

372 rows × 15 columns

In [86]:
# Cross-tabulate cluster membership against the 'chosen' label and draw
# a stacked bar chart: one bar per cluster, split by chosen = 0 / 1.
# (Local renamed from `stacked` to avoid shadowing the stacked= keyword.)
counts = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = counts.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[86]:
<matplotlib.axes._subplots.AxesSubplot at 0x2445d307358>
In [16]:
# Render the current company's name as a markdown section header.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[1]))

Club De Banqueros y Empresarios

ANN

In [193]:
# Standardized MFCC feature matrix for company index 1.
X = df_n_ps_std_mfcc[1]
In [194]:
# Binary target: whether the song was chosen for the playlist.
y = df_n_ps[1]['chosen']
In [195]:
# Random split, default test_size=0.25. NOTE(review): no random_state is
# set, so the split (and every result below) changes on each re-run.
X_train, X_test, y_train, y_test = train_test_split(X, y)
In [196]:
# Sanity check: 191 training samples x 13 MFCC features.
X_train.shape
Out[196]:
(191, 13)
In [47]:
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [48]:
# Hyperparameter grids for the MLP search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
# One-, two- and three-layer architectures of 10/20/30 units, plus tapered stacks.
hidden_layer_sizes_vec = (
    [(10,), (20,), (30,)]
    + [(10, 10), (20, 20), (30, 30), (20, 10)]
    + [(10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
)
# 0.001 .. 0.010 in steps of 0.001, plus 0.02.
learning_rate_init_vec = [i / 1000 for i in range(1, 11)] + [0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [49]:
import time
start = time.time()  # current time in seconds since the Unix epoch (reference point: 1 Jan 1970)

np.random.seed(1234)
# Search space; batch_size is left commented out to keep the grid tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Refit on accuracy, but also track Cohen's kappa for every candidate.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` parameter was deprecated in scikit-learn 0.22 and
# removed in 0.24 — confirm the pinned sklearn version before re-running.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [50]:
# Run the exhaustive grid search (took ~25 minutes per the log below).
grid.fit(X_train, y_train)

# Report the best parameters, their CV accuracy, and the kappa of that same
# candidate (the printed messages are user-facing and left untouched).
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # time after the grid search finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20, 20, 20), 'learning_rate_init': 0.01, 'max_iter': 2000}, que permiten obtener un Accuracy de 81.68% y un Kappa del 52.97
Tiempo total: 25.07 minutos
In [197]:
# Derive the Keras network architecture from the grid-search result.
n0 = X_train.shape[1]  # input dimensionality (13 MFCC features)
# Re-pin the hidden layout found by the search so this cell is reproducible
# even if `grid` was refit with different data.
grid.best_params_['hidden_layer_sizes'] = [20, 20, 20]
### hidden_layer_sizes
# Copy the layer widths and append the single output unit — replaces the
# original index-by-index append loop with an idiomatic list() copy.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = 0.01
epochs = 2000
In [198]:
input_tensor = Input(shape = (n0,))
In [199]:
# Build the hidden stack layer by layer; hidden_outputs[i] feeds layer i.
# NOTE(review): the activation here is 'tanh' even though the grid search
# above selected 'relu' — confirm this difference is intentional.
hidden_outputs = [input_tensor]
for i in range (len(ns)-1):
    hidden_outputs.append(Dense(ns[i], activation = 'tanh')(hidden_outputs[i]))
    
# Single sigmoid unit for binary classification.
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [200]:
# Assemble the model and snapshot its freshly initialized weights so the
# network can be reset to the same starting point before training.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [201]:
# Architecture overview: 13 -> 20 -> 20 -> 20 -> 1 (1,141 parameters).
model.summary()
Model: "model_7"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_7 (InputLayer)         (None, 13)                0         
_________________________________________________________________
dense_19 (Dense)             (None, 20)                280       
_________________________________________________________________
dense_20 (Dense)             (None, 20)                420       
_________________________________________________________________
dense_21 (Dense)             (None, 20)                420       
_________________________________________________________________
dense_22 (Dense)             (None, 1)                 21        
=================================================================
Total params: 1,141
Trainable params: 1,141
Non-trainable params: 0
_________________________________________________________________
In [202]:
# Reset to the saved initial weights so training starts from a known state.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)  # NOTE(review): `lr` is the legacy arg name; newer Keras uses `learning_rate`
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever val_accuracy fails to improve by at
# least 0.01 for 10 consecutive epochs.
# NOTE(review): the test split doubles as validation data here, so the
# "test" metrics reported later are not fully held out — consider a
# separate validation split.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 191 samples, validate on 64 samples
Epoch 1/2000
191/191 [==============================] - 0s 1ms/step - loss: 0.6261 - accuracy: 0.6387 - val_loss: 0.5919 - val_accuracy: 0.7031
Epoch 2/2000
191/191 [==============================] - 0s 63us/step - loss: 0.4675 - accuracy: 0.7487 - val_loss: 0.5618 - val_accuracy: 0.7188
Epoch 3/2000
191/191 [==============================] - 0s 68us/step - loss: 0.4415 - accuracy: 0.7906 - val_loss: 0.6494 - val_accuracy: 0.7500
Epoch 4/2000
191/191 [==============================] - 0s 68us/step - loss: 0.3992 - accuracy: 0.8220 - val_loss: 0.6424 - val_accuracy: 0.7188
Epoch 5/2000
191/191 [==============================] - 0s 68us/step - loss: 0.3713 - accuracy: 0.8429 - val_loss: 0.6325 - val_accuracy: 0.7344
Epoch 6/2000
191/191 [==============================] - 0s 63us/step - loss: 0.3457 - accuracy: 0.8586 - val_loss: 0.6120 - val_accuracy: 0.7188
Epoch 7/2000
191/191 [==============================] - 0s 58us/step - loss: 0.3136 - accuracy: 0.8901 - val_loss: 0.6513 - val_accuracy: 0.7344
Epoch 8/2000
191/191 [==============================] - 0s 58us/step - loss: 0.2935 - accuracy: 0.9005 - val_loss: 0.6515 - val_accuracy: 0.7344
Epoch 9/2000
191/191 [==============================] - 0s 63us/step - loss: 0.2669 - accuracy: 0.8953 - val_loss: 0.6463 - val_accuracy: 0.7188
Epoch 10/2000
191/191 [==============================] - 0s 58us/step - loss: 0.2335 - accuracy: 0.9162 - val_loss: 0.6499 - val_accuracy: 0.7188
Epoch 11/2000
191/191 [==============================] - 0s 78us/step - loss: 0.2106 - accuracy: 0.9267 - val_loss: 0.7244 - val_accuracy: 0.7344
Epoch 12/2000
191/191 [==============================] - 0s 63us/step - loss: 0.1772 - accuracy: 0.9372 - val_loss: 0.7337 - val_accuracy: 0.7344
Epoch 13/2000
191/191 [==============================] - 0s 63us/step - loss: 0.1720 - accuracy: 0.9319 - val_loss: 0.8111 - val_accuracy: 0.7031

Epoch 00013: ReduceLROnPlateau reducing learning rate to 0.004999999888241291.
Epoch 14/2000
191/191 [==============================] - 0s 58us/step - loss: 0.1282 - accuracy: 0.9686 - val_loss: 0.7748 - val_accuracy: 0.7500
Epoch 15/2000
191/191 [==============================] - 0s 68us/step - loss: 0.1112 - accuracy: 0.9791 - val_loss: 0.8126 - val_accuracy: 0.7500
Epoch 16/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0983 - accuracy: 0.9791 - val_loss: 0.8576 - val_accuracy: 0.7500
Epoch 17/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0847 - accuracy: 0.9895 - val_loss: 0.8517 - val_accuracy: 0.7344
Epoch 18/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0736 - accuracy: 0.9948 - val_loss: 0.8483 - val_accuracy: 0.7656
Epoch 19/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0656 - accuracy: 0.9948 - val_loss: 0.9130 - val_accuracy: 0.7188
Epoch 20/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0547 - accuracy: 1.0000 - val_loss: 0.9393 - val_accuracy: 0.7188
Epoch 21/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0486 - accuracy: 1.0000 - val_loss: 0.9333 - val_accuracy: 0.7500
Epoch 22/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0381 - accuracy: 1.0000 - val_loss: 1.0137 - val_accuracy: 0.7188
Epoch 23/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0341 - accuracy: 1.0000 - val_loss: 1.0446 - val_accuracy: 0.7344
Epoch 24/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0278 - accuracy: 1.0000 - val_loss: 1.0305 - val_accuracy: 0.7500
Epoch 25/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0247 - accuracy: 1.0000 - val_loss: 1.0528 - val_accuracy: 0.7188
Epoch 26/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0211 - accuracy: 1.0000 - val_loss: 1.1001 - val_accuracy: 0.7500
Epoch 27/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0180 - accuracy: 1.0000 - val_loss: 1.1188 - val_accuracy: 0.7500
Epoch 28/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0159 - accuracy: 1.0000 - val_loss: 1.1365 - val_accuracy: 0.7344

Epoch 00028: ReduceLROnPlateau reducing learning rate to 0.0024999999441206455.
Epoch 29/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0143 - accuracy: 1.0000 - val_loss: 1.1481 - val_accuracy: 0.7344
Epoch 30/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0130 - accuracy: 1.0000 - val_loss: 1.1627 - val_accuracy: 0.7500
Epoch 31/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0125 - accuracy: 1.0000 - val_loss: 1.1728 - val_accuracy: 0.7500
Epoch 32/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0118 - accuracy: 1.0000 - val_loss: 1.1814 - val_accuracy: 0.7500
Epoch 33/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0111 - accuracy: 1.0000 - val_loss: 1.1954 - val_accuracy: 0.7500
Epoch 34/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0107 - accuracy: 1.0000 - val_loss: 1.2078 - val_accuracy: 0.7344
Epoch 35/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0101 - accuracy: 1.0000 - val_loss: 1.2148 - val_accuracy: 0.7500
Epoch 36/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0096 - accuracy: 1.0000 - val_loss: 1.2184 - val_accuracy: 0.7344
Epoch 37/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0091 - accuracy: 1.0000 - val_loss: 1.2278 - val_accuracy: 0.7344
Epoch 38/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0087 - accuracy: 1.0000 - val_loss: 1.2391 - val_accuracy: 0.7344

Epoch 00038: ReduceLROnPlateau reducing learning rate to 0.0012499999720603228.
Epoch 39/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0083 - accuracy: 1.0000 - val_loss: 1.2467 - val_accuracy: 0.7344
Epoch 40/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0081 - accuracy: 1.0000 - val_loss: 1.2504 - val_accuracy: 0.7344
Epoch 41/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0079 - accuracy: 1.0000 - val_loss: 1.2559 - val_accuracy: 0.7344
Epoch 42/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0077 - accuracy: 1.0000 - val_loss: 1.2602 - val_accuracy: 0.7344
Epoch 43/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0076 - accuracy: 1.0000 - val_loss: 1.2626 - val_accuracy: 0.7500
Epoch 44/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0074 - accuracy: 1.0000 - val_loss: 1.2679 - val_accuracy: 0.7344
Epoch 45/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0072 - accuracy: 1.0000 - val_loss: 1.2688 - val_accuracy: 0.7344
Epoch 46/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0071 - accuracy: 1.0000 - val_loss: 1.2745 - val_accuracy: 0.7344
Epoch 47/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0069 - accuracy: 1.0000 - val_loss: 1.2803 - val_accuracy: 0.7344
Epoch 48/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0067 - accuracy: 1.0000 - val_loss: 1.2839 - val_accuracy: 0.7344

Epoch 00048: ReduceLROnPlateau reducing learning rate to 0.0006249999860301614.
Epoch 49/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0066 - accuracy: 1.0000 - val_loss: 1.2861 - val_accuracy: 0.7344
Epoch 50/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0066 - accuracy: 1.0000 - val_loss: 1.2886 - val_accuracy: 0.7344
Epoch 51/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0065 - accuracy: 1.0000 - val_loss: 1.2901 - val_accuracy: 0.7344
Epoch 52/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0064 - accuracy: 1.0000 - val_loss: 1.2916 - val_accuracy: 0.7344
Epoch 53/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0063 - accuracy: 1.0000 - val_loss: 1.2941 - val_accuracy: 0.7344
Epoch 54/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0063 - accuracy: 1.0000 - val_loss: 1.2960 - val_accuracy: 0.7344
Epoch 55/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0062 - accuracy: 1.0000 - val_loss: 1.2973 - val_accuracy: 0.7344
Epoch 56/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0061 - accuracy: 1.0000 - val_loss: 1.2994 - val_accuracy: 0.7344
Epoch 57/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0061 - accuracy: 1.0000 - val_loss: 1.3012 - val_accuracy: 0.7344
Epoch 58/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0060 - accuracy: 1.0000 - val_loss: 1.3040 - val_accuracy: 0.7344

Epoch 00058: ReduceLROnPlateau reducing learning rate to 0.0003124999930150807.
Epoch 59/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0059 - accuracy: 1.0000 - val_loss: 1.3054 - val_accuracy: 0.7344
Epoch 60/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0059 - accuracy: 1.0000 - val_loss: 1.3067 - val_accuracy: 0.7344
Epoch 61/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0059 - accuracy: 1.0000 - val_loss: 1.3080 - val_accuracy: 0.7344
Epoch 62/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0058 - accuracy: 1.0000 - val_loss: 1.3096 - val_accuracy: 0.7344
Epoch 63/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0058 - accuracy: 1.0000 - val_loss: 1.3105 - val_accuracy: 0.7344
Epoch 64/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0058 - accuracy: 1.0000 - val_loss: 1.3117 - val_accuracy: 0.7344
Epoch 65/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0057 - accuracy: 1.0000 - val_loss: 1.3128 - val_accuracy: 0.7344
Epoch 66/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0057 - accuracy: 1.0000 - val_loss: 1.3135 - val_accuracy: 0.7344
Epoch 67/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0057 - accuracy: 1.0000 - val_loss: 1.3143 - val_accuracy: 0.7344
Epoch 68/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0057 - accuracy: 1.0000 - val_loss: 1.3152 - val_accuracy: 0.7344

Epoch 00068: ReduceLROnPlateau reducing learning rate to 0.00015624999650754035.
Epoch 69/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0056 - accuracy: 1.0000 - val_loss: 1.3161 - val_accuracy: 0.7344
Epoch 70/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0056 - accuracy: 1.0000 - val_loss: 1.3164 - val_accuracy: 0.7344
Epoch 71/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0056 - accuracy: 1.0000 - val_loss: 1.3170 - val_accuracy: 0.7344
Epoch 72/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0056 - accuracy: 1.0000 - val_loss: 1.3178 - val_accuracy: 0.7344
Epoch 73/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 1.3184 - val_accuracy: 0.7344
Epoch 74/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 1.3189 - val_accuracy: 0.7344
Epoch 75/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 1.3193 - val_accuracy: 0.7344
Epoch 76/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 1.3203 - val_accuracy: 0.7344
Epoch 77/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 1.3209 - val_accuracy: 0.7344
Epoch 78/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 1.3217 - val_accuracy: 0.7344

Epoch 00078: ReduceLROnPlateau reducing learning rate to 7.812499825377017e-05.
Epoch 79/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 1.3219 - val_accuracy: 0.7344
Epoch 80/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3223 - val_accuracy: 0.7344
Epoch 81/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3226 - val_accuracy: 0.7344
Epoch 82/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3229 - val_accuracy: 0.7344
Epoch 83/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3233 - val_accuracy: 0.7344
Epoch 84/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3235 - val_accuracy: 0.7344
Epoch 85/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3239 - val_accuracy: 0.7344
Epoch 86/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3242 - val_accuracy: 0.7344
Epoch 87/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3245 - val_accuracy: 0.7344
Epoch 88/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3248 - val_accuracy: 0.7344

Epoch 00088: ReduceLROnPlateau reducing learning rate to 3.9062499126885086e-05.
Epoch 89/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3250 - val_accuracy: 0.7344
Epoch 90/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3251 - val_accuracy: 0.7344
Epoch 91/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3252 - val_accuracy: 0.7344
Epoch 92/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.3255 - val_accuracy: 0.7344
Epoch 93/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3256 - val_accuracy: 0.7344
Epoch 94/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3257 - val_accuracy: 0.7344
Epoch 95/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3258 - val_accuracy: 0.7344
Epoch 96/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3260 - val_accuracy: 0.7344
Epoch 97/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3262 - val_accuracy: 0.7344
Epoch 98/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3264 - val_accuracy: 0.7344

Epoch 00098: ReduceLROnPlateau reducing learning rate to 1.9531249563442543e-05.
Epoch 99/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3265 - val_accuracy: 0.7344
Epoch 100/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3266 - val_accuracy: 0.7344
Epoch 101/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3267 - val_accuracy: 0.7344
Epoch 102/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3267 - val_accuracy: 0.7344
Epoch 103/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3268 - val_accuracy: 0.7344
Epoch 104/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3269 - val_accuracy: 0.7344
Epoch 105/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3270 - val_accuracy: 0.7344
Epoch 106/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3271 - val_accuracy: 0.7344
Epoch 107/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3272 - val_accuracy: 0.7344
Epoch 108/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3273 - val_accuracy: 0.7344

Epoch 00108: ReduceLROnPlateau reducing learning rate to 9.765624781721272e-06.
Epoch 109/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3273 - val_accuracy: 0.7344
Epoch 110/2000
191/191 [==============================] - 0s 131us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3274 - val_accuracy: 0.7344
Epoch 111/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3274 - val_accuracy: 0.7344
Epoch 112/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3274 - val_accuracy: 0.7344
Epoch 113/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3275 - val_accuracy: 0.7344
Epoch 114/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3275 - val_accuracy: 0.7344
Epoch 115/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3276 - val_accuracy: 0.7344
Epoch 116/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3276 - val_accuracy: 0.7344
Epoch 117/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3277 - val_accuracy: 0.7344
Epoch 118/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3277 - val_accuracy: 0.7344

Epoch 00118: ReduceLROnPlateau reducing learning rate to 4.882812390860636e-06.
Epoch 119/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3278 - val_accuracy: 0.7344
Epoch 120/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3278 - val_accuracy: 0.7344
Epoch 121/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3278 - val_accuracy: 0.7344
Epoch 122/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3278 - val_accuracy: 0.7344
Epoch 123/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3278 - val_accuracy: 0.7344
Epoch 124/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3279 - val_accuracy: 0.7344
Epoch 125/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3279 - val_accuracy: 0.7344
Epoch 126/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3279 - val_accuracy: 0.7344
Epoch 127/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3279 - val_accuracy: 0.7344
Epoch 128/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3280 - val_accuracy: 0.7344

Epoch 00128: ReduceLROnPlateau reducing learning rate to 2.441406195430318e-06.
Epoch 129/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3280 - val_accuracy: 0.7344
Epoch 130/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3280 - val_accuracy: 0.7344
Epoch 131/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3280 - val_accuracy: 0.7344
Epoch 132/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3280 - val_accuracy: 0.7344
Epoch 133/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3280 - val_accuracy: 0.7344
Epoch 134/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3280 - val_accuracy: 0.7344
Epoch 135/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 136/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 137/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 138/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344

Epoch 00138: ReduceLROnPlateau reducing learning rate to 1.220703097715159e-06.
Epoch 139/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 140/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 141/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 142/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 143/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 144/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 145/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 146/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3281 - val_accuracy: 0.7344
Epoch 147/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 148/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00148: ReduceLROnPlateau reducing learning rate to 6.103515488575795e-07.
Epoch 149/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 150/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 151/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 152/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 153/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 154/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 155/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 156/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 157/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 158/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00158: ReduceLROnPlateau reducing learning rate to 3.0517577442878974e-07.
Epoch 159/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 160/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 161/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 162/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 163/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 164/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 165/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 166/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 167/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 168/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00168: ReduceLROnPlateau reducing learning rate to 1.5258788721439487e-07.
Epoch 169/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 170/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 171/2000
191/191 [==============================] - ETA: 0s - loss: 0.0035 - accuracy: 1.00 - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 172/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 173/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 174/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 175/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 176/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 177/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 178/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00178: ReduceLROnPlateau reducing learning rate to 7.629394360719743e-08.
Epoch 179/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 180/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 181/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 182/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 183/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 184/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 185/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 186/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 187/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 188/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00188: ReduceLROnPlateau reducing learning rate to 3.814697180359872e-08.
Epoch 189/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 190/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 191/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 192/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 193/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 194/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 195/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 196/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 197/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 198/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00198: ReduceLROnPlateau reducing learning rate to 1.907348590179936e-08.
Epoch 199/2000
191/191 [==============================] - 0s 52us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 200/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 201/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 202/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 203/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 204/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 205/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 206/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 207/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 208/2000
191/191 [==============================] - 0s 79us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00208: ReduceLROnPlateau reducing learning rate to 9.53674295089968e-09.
Epoch 209/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 210/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 211/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 212/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 213/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 214/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 215/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 216/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 217/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 218/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00218: ReduceLROnPlateau reducing learning rate to 4.76837147544984e-09.
Epoch 219/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 220/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 221/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 222/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 223/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 224/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 225/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 226/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 227/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 228/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00228: ReduceLROnPlateau reducing learning rate to 2.38418573772492e-09.
Epoch 229/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 230/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 231/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 232/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 233/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 234/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 235/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 236/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 237/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 238/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00238: ReduceLROnPlateau reducing learning rate to 1.19209286886246e-09.
Epoch 239/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 240/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 241/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 242/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 243/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 244/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 245/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 246/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 247/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 248/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00248: ReduceLROnPlateau reducing learning rate to 5.9604643443123e-10.
Epoch 249/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 250/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 251/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 252/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 253/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 254/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 255/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 256/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 257/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 258/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00258: ReduceLROnPlateau reducing learning rate to 2.98023217215615e-10.
Epoch 259/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 260/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 261/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 262/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 263/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 264/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 265/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 266/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 267/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 268/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00268: ReduceLROnPlateau reducing learning rate to 1.490116086078075e-10.
Epoch 269/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 270/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 271/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 272/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 273/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 274/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 275/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 276/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 277/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 278/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00278: ReduceLROnPlateau reducing learning rate to 7.450580430390374e-11.
Epoch 279/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 280/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 281/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 282/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 283/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 284/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 285/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 286/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 287/2000
191/191 [==============================] - 0s 79us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 288/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00288: ReduceLROnPlateau reducing learning rate to 3.725290215195187e-11.
Epoch 289/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 290/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 291/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 292/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 293/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 294/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 295/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 296/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 297/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 298/2000
191/191 [==============================] - ETA: 0s - loss: 0.0064 - accuracy: 1.00 - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00298: ReduceLROnPlateau reducing learning rate to 1.8626451075975936e-11.
Epoch 299/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 300/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 301/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 302/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 303/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 304/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 305/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 306/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 307/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 308/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00308: ReduceLROnPlateau reducing learning rate to 9.313225537987968e-12.
Epoch 309/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 310/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 311/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 312/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 313/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 314/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 315/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 316/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 317/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 318/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00318: ReduceLROnPlateau reducing learning rate to 4.656612768993984e-12.
Epoch 319/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 320/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 321/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 322/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 323/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 324/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 325/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 326/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 327/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 328/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00328: ReduceLROnPlateau reducing learning rate to 2.328306384496992e-12.
Epoch 329/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 330/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 331/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 332/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 333/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 334/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 335/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 336/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 337/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 338/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00338: ReduceLROnPlateau reducing learning rate to 1.164153192248496e-12.
Epoch 339/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 340/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 341/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 342/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 343/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 344/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 345/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 346/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 347/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 348/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00348: ReduceLROnPlateau reducing learning rate to 5.82076596124248e-13.
Epoch 349/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 350/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 351/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 352/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 353/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 354/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 355/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 356/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 357/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 358/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00358: ReduceLROnPlateau reducing learning rate to 2.91038298062124e-13.
Epoch 359/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 360/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 361/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 362/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 363/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 364/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 365/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 366/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 367/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 368/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00368: ReduceLROnPlateau reducing learning rate to 1.45519149031062e-13.
Epoch 369/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 370/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 371/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 372/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 373/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 374/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 375/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 376/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 377/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 378/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00378: ReduceLROnPlateau reducing learning rate to 7.2759574515531e-14.
Epoch 379/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 380/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 381/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 382/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 383/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 384/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 385/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 386/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 387/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 388/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00388: ReduceLROnPlateau reducing learning rate to 3.63797872577655e-14.
Epoch 389/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 390/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 391/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 392/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 393/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 394/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 395/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 396/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 397/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 398/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00398: ReduceLROnPlateau reducing learning rate to 1.818989362888275e-14.
Epoch 399/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 400/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 401/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 402/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 403/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 404/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 405/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 406/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 407/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 408/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00408: ReduceLROnPlateau reducing learning rate to 9.094946814441375e-15.
Epoch 409/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 410/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 411/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 412/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 413/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 414/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 415/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 416/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 417/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 418/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00418: ReduceLROnPlateau reducing learning rate to 4.5474734072206875e-15.
Epoch 419/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 420/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 421/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 422/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 423/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 424/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 425/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 426/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 427/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 428/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00428: ReduceLROnPlateau reducing learning rate to 2.2737367036103438e-15.
Epoch 429/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 430/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 431/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 432/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 433/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 434/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 435/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 436/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 437/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 438/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00438: ReduceLROnPlateau reducing learning rate to 1.1368683518051719e-15.
Epoch 439/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 440/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 441/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 442/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 443/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 444/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 445/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 446/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 447/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 448/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00448: ReduceLROnPlateau reducing learning rate to 5.684341759025859e-16.
Epoch 449/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 450/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 451/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 452/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 453/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 454/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 455/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 456/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 457/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 458/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00458: ReduceLROnPlateau reducing learning rate to 2.8421708795129297e-16.
Epoch 459/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 460/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 461/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 462/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 463/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 464/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 465/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 466/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 467/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 468/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00468: ReduceLROnPlateau reducing learning rate to 1.4210854397564648e-16.
Epoch 469/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 470/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 471/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 472/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 473/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 474/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 475/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 476/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 477/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 478/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00478: ReduceLROnPlateau reducing learning rate to 7.105427198782324e-17.
Epoch 479/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 480/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 481/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 482/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 483/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 484/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 485/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 486/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 487/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 488/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00488: ReduceLROnPlateau reducing learning rate to 3.552713599391162e-17.
Epoch 489/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 490/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 491/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 492/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 493/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 494/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 495/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 496/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 497/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 498/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00498: ReduceLROnPlateau reducing learning rate to 1.776356799695581e-17.
Epoch 499/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 500/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 501/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 502/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 503/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 504/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 505/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 506/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 507/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 508/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00508: ReduceLROnPlateau reducing learning rate to 8.881783998477905e-18.
Epoch 509/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 510/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 511/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 512/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 513/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 514/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 515/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 516/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 517/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 518/2000
191/191 [==============================] - 0s 52us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00518: ReduceLROnPlateau reducing learning rate to 4.440891999238953e-18.
Epoch 519/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 520/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 521/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 522/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 523/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 524/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 525/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 526/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 527/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 528/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00528: ReduceLROnPlateau reducing learning rate to 2.2204459996194763e-18.
Epoch 529/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 530/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 531/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 532/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 533/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 534/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 535/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 536/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 537/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 538/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00538: ReduceLROnPlateau reducing learning rate to 1.1102229998097382e-18.
Epoch 539/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 540/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 541/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 542/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 543/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 544/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 545/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 546/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 547/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 548/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00548: ReduceLROnPlateau reducing learning rate to 5.551114999048691e-19.
Epoch 549/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 550/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 551/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 552/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 553/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 554/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 555/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 556/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 557/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 558/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00558: ReduceLROnPlateau reducing learning rate to 2.7755574995243454e-19.
Epoch 559/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 560/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 561/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 562/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 563/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 564/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 565/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 566/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 567/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 568/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00568: ReduceLROnPlateau reducing learning rate to 1.3877787497621727e-19.
Epoch 569/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 570/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 571/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 572/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 573/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 574/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 575/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 576/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 577/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 578/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00578: ReduceLROnPlateau reducing learning rate to 6.938893748810864e-20.
Epoch 579/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 580/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 581/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 582/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 583/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 584/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 585/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 586/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 587/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 588/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00588: ReduceLROnPlateau reducing learning rate to 3.469446874405432e-20.
Epoch 589/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 590/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 591/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 592/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 593/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 594/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 595/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 596/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 597/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 598/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00598: ReduceLROnPlateau reducing learning rate to 1.734723437202716e-20.
Epoch 599/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 600/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 601/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 602/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 603/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 604/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 605/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 606/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 607/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 608/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00608: ReduceLROnPlateau reducing learning rate to 8.67361718601358e-21.
Epoch 609/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 610/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 611/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 612/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 613/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 614/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 615/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 616/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 617/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 618/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00618: ReduceLROnPlateau reducing learning rate to 4.33680859300679e-21.
Epoch 619/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 620/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 621/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 622/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 623/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 624/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 625/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 626/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 627/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 628/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00628: ReduceLROnPlateau reducing learning rate to 2.168404296503395e-21.
Epoch 629/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 630/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 631/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 632/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 633/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 634/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 635/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 636/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 637/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 638/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00638: ReduceLROnPlateau reducing learning rate to 1.0842021482516974e-21.
Epoch 639/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 640/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 641/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 642/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 643/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 644/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 645/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 646/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 647/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 648/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00648: ReduceLROnPlateau reducing learning rate to 5.421010741258487e-22.
Epoch 649/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 650/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 651/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 652/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 653/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 654/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 655/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 656/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 657/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 658/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00658: ReduceLROnPlateau reducing learning rate to 2.7105053706292436e-22.
Epoch 659/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 660/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 661/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 662/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 663/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 664/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 665/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 666/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 667/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 668/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00668: ReduceLROnPlateau reducing learning rate to 1.3552526853146218e-22.
Epoch 669/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 670/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 671/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 672/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 673/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 674/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 675/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 676/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 677/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 678/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00678: ReduceLROnPlateau reducing learning rate to 6.776263426573109e-23.
Epoch 679/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 680/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 681/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 682/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 683/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 684/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 685/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 686/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 687/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 688/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00688: ReduceLROnPlateau reducing learning rate to 3.3881317132865545e-23.
Epoch 689/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 690/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 691/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 692/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 693/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 694/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 695/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 696/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 697/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 698/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00698: ReduceLROnPlateau reducing learning rate to 1.6940658566432772e-23.
Epoch 699/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 700/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 701/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 702/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 703/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 704/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 705/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 706/2000
191/191 [==============================] - 0s 209us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 707/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 708/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00708: ReduceLROnPlateau reducing learning rate to 8.470329283216386e-24.
Epoch 709/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 710/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 711/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 712/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 713/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 714/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 715/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 716/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 717/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 718/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00718: ReduceLROnPlateau reducing learning rate to 4.235164641608193e-24.
Epoch 719/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 720/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 721/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 722/2000
191/191 [==============================] - ETA: 0s - loss: 0.0058 - accuracy: 1.00 - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 723/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 724/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 725/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 726/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 727/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 728/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00728: ReduceLROnPlateau reducing learning rate to 2.1175823208040965e-24.
Epoch 729/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 730/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 731/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 732/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 733/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 734/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 735/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 736/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 737/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 738/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00738: ReduceLROnPlateau reducing learning rate to 1.0587911604020483e-24.
Epoch 739/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 740/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 741/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 742/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 743/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 744/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 745/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 746/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 747/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 748/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00748: ReduceLROnPlateau reducing learning rate to 5.293955802010241e-25.
Epoch 749/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 750/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 751/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 752/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 753/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 754/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 755/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 756/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 757/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 758/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00758: ReduceLROnPlateau reducing learning rate to 2.6469779010051207e-25.
Epoch 759/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 760/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 761/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 762/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 763/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 764/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 765/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 766/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 767/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 768/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00768: ReduceLROnPlateau reducing learning rate to 1.3234889505025603e-25.
Epoch 769/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 770/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 771/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 772/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 773/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 774/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 775/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 776/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 777/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 778/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00778: ReduceLROnPlateau reducing learning rate to 6.617444752512802e-26.
Epoch 779/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 780/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 781/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 782/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 783/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 784/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 785/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 786/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 787/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 788/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00788: ReduceLROnPlateau reducing learning rate to 3.308722376256401e-26.
Epoch 789/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 790/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 791/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 792/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 793/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 794/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 795/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 796/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 797/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 798/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00798: ReduceLROnPlateau reducing learning rate to 1.6543611881282004e-26.
Epoch 799/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 800/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 801/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 802/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 803/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 804/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 805/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 806/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 807/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 808/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00808: ReduceLROnPlateau reducing learning rate to 8.271805940641002e-27.
Epoch 809/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 810/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 811/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 812/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 813/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 814/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 815/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 816/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 817/2000
191/191 [==============================] - 0s 68us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 818/2000
191/191 [==============================] - 0s 58us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00818: ReduceLROnPlateau reducing learning rate to 4.135902970320501e-27.
Epoch 819/2000
191/191 [==============================] - 0s 63us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 820/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 821/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 822/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 823/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 824/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 825/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 826/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 827/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 828/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00828: ReduceLROnPlateau reducing learning rate to 2.0679514851602505e-27.
Epoch 829/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 830/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 831/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 832/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 833/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 834/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 835/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 836/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 837/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 838/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00838: ReduceLROnPlateau reducing learning rate to 1.0339757425801253e-27.
Epoch 839/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 840/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 841/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 842/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 843/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 844/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 845/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 846/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 847/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 848/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00848: ReduceLROnPlateau reducing learning rate to 5.169878712900626e-28.
Epoch 849/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 850/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 851/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 852/2000
191/191 [==============================] - 0s 131us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 853/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 854/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 855/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 856/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 857/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 858/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00858: ReduceLROnPlateau reducing learning rate to 2.584939356450313e-28.
Epoch 859/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 860/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 861/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 862/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 863/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 864/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 865/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 866/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 867/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 868/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00868: ReduceLROnPlateau reducing learning rate to 1.2924696782251566e-28.
Epoch 869/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 870/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 871/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 872/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 873/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 874/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 875/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 876/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 877/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 878/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00878: ReduceLROnPlateau reducing learning rate to 6.462348391125783e-29.
Epoch 879/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 880/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 881/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 882/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 883/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 884/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 885/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 886/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 887/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 888/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00888: ReduceLROnPlateau reducing learning rate to 3.2311741955628914e-29.
Epoch 889/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 890/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 891/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 892/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 893/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 894/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 895/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 896/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 897/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 898/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00898: ReduceLROnPlateau reducing learning rate to 1.6155870977814457e-29.
Epoch 899/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 900/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 901/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 902/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 903/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 904/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 905/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 906/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 907/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 908/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00908: ReduceLROnPlateau reducing learning rate to 8.077935488907229e-30.
Epoch 909/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 910/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 911/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 912/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 913/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 914/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 915/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 916/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 917/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 918/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00918: ReduceLROnPlateau reducing learning rate to 4.038967744453614e-30.
Epoch 919/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 920/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 921/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 922/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 923/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 924/2000
191/191 [==============================] - ETA: 0s - loss: 0.0058 - accuracy: 1.00 - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 925/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 926/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 927/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 928/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00928: ReduceLROnPlateau reducing learning rate to 2.019483872226807e-30.
Epoch 929/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 930/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 931/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 932/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 933/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 934/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 935/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 936/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 937/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 938/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00938: ReduceLROnPlateau reducing learning rate to 1.0097419361134036e-30.
Epoch 939/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 940/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 941/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 942/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 943/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 944/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 945/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 946/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 947/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 948/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00948: ReduceLROnPlateau reducing learning rate to 5.048709680567018e-31.
Epoch 949/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 950/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 951/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 952/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 953/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 954/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 955/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 956/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 957/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 958/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00958: ReduceLROnPlateau reducing learning rate to 2.524354840283509e-31.
Epoch 959/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 960/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 961/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 962/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 963/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 964/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 965/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 966/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 967/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 968/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00968: ReduceLROnPlateau reducing learning rate to 1.2621774201417545e-31.
Epoch 969/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 970/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 971/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 972/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 973/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 974/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 975/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 976/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 977/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 978/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00978: ReduceLROnPlateau reducing learning rate to 6.310887100708772e-32.
Epoch 979/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 980/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 981/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 982/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 983/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 984/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 985/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 986/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 987/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 988/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00988: ReduceLROnPlateau reducing learning rate to 3.155443550354386e-32.
Epoch 989/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 990/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 991/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 992/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 993/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 994/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 995/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 996/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 997/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 998/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 00998: ReduceLROnPlateau reducing learning rate to 1.577721775177193e-32.
Epoch 999/2000
191/191 [==============================] - 0s 157us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1000/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1001/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1002/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1003/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1004/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1005/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1006/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1007/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1008/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01008: ReduceLROnPlateau reducing learning rate to 7.888608875885965e-33.
Epoch 1009/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1010/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1011/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1012/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1013/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1014/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1015/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1016/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1017/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1018/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01018: ReduceLROnPlateau reducing learning rate to 3.944304437942983e-33.
Epoch 1019/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1020/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1021/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1022/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1023/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1024/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1025/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1026/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1027/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1028/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01028: ReduceLROnPlateau reducing learning rate to 1.9721522189714914e-33.
Epoch 1029/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1030/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1031/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1032/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1033/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1034/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1035/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1036/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1037/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1038/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01038: ReduceLROnPlateau reducing learning rate to 9.860761094857457e-34.
Epoch 1039/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1040/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1041/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1042/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1043/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1044/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1045/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1046/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1047/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1048/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01048: ReduceLROnPlateau reducing learning rate to 4.930380547428728e-34.
Epoch 1049/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1050/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1051/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1052/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1053/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1054/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1055/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1056/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1057/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1058/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01058: ReduceLROnPlateau reducing learning rate to 2.465190273714364e-34.
Epoch 1059/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1060/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1061/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1062/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1063/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1064/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1065/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1066/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1067/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1068/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01068: ReduceLROnPlateau reducing learning rate to 1.232595136857182e-34.
Epoch 1069/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1070/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1071/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1072/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1073/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1074/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1075/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1076/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1077/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1078/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01078: ReduceLROnPlateau reducing learning rate to 6.16297568428591e-35.
Epoch 1079/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1080/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1081/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1082/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1083/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1084/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1085/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1086/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1087/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1088/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01088: ReduceLROnPlateau reducing learning rate to 3.081487842142955e-35.
Epoch 1089/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1090/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1091/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1092/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1093/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1094/2000
191/191 [==============================] - 0s 79us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1095/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1096/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1097/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1098/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01098: ReduceLROnPlateau reducing learning rate to 1.5407439210714776e-35.
Epoch 1099/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1100/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1101/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1102/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1103/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1104/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1105/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1106/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1107/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1108/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01108: ReduceLROnPlateau reducing learning rate to 7.703719605357388e-36.
Epoch 1109/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1110/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1111/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1112/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1113/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1114/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1115/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1116/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1117/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1118/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01118: ReduceLROnPlateau reducing learning rate to 3.851859802678694e-36.
Epoch 1119/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1120/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1121/2000
191/191 [==============================] - 0s 136us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1122/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1123/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1124/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1125/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1126/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1127/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1128/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01128: ReduceLROnPlateau reducing learning rate to 1.925929901339347e-36.
Epoch 1129/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1130/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1131/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1132/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1133/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1134/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1135/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1136/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1137/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1138/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01138: ReduceLROnPlateau reducing learning rate to 9.629649506696735e-37.
Epoch 1139/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1140/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1141/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1142/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1143/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1144/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1145/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1146/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1147/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1148/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01148: ReduceLROnPlateau reducing learning rate to 4.8148247533483676e-37.
Epoch 1149/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1150/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1151/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1152/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1153/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1154/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1155/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1156/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1157/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1158/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01158: ReduceLROnPlateau reducing learning rate to 2.4074123766741838e-37.
Epoch 1159/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1160/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1161/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1162/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1163/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1164/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1165/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1166/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1167/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1168/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01168: ReduceLROnPlateau reducing learning rate to 1.2037061883370919e-37.
Epoch 1169/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1170/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1171/2000
191/191 [==============================] - 0s 147us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1172/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1173/2000
191/191 [==============================] - 0s 162us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1174/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1175/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1176/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1177/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1178/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01178: ReduceLROnPlateau reducing learning rate to 6.018530941685459e-38.
Epoch 1179/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1180/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1181/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1182/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1183/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1184/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1185/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1186/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1187/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1188/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01188: ReduceLROnPlateau reducing learning rate to 3.0092654708427297e-38.
Epoch 1189/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1190/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1191/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1192/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1193/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1194/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1195/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1196/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1197/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1198/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01198: ReduceLROnPlateau reducing learning rate to 1.5046327354213649e-38.
Epoch 1199/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1200/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1201/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1202/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1203/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1204/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1205/2000
191/191 [==============================] - 0s 131us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1206/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1207/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1208/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01208: ReduceLROnPlateau reducing learning rate to 7.523163677106824e-39.
Epoch 1209/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1210/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1211/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1212/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1213/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1214/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1215/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1216/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1217/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1218/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01218: ReduceLROnPlateau reducing learning rate to 3.761581838553412e-39.
Epoch 1219/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1220/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1221/2000
191/191 [==============================] - ETA: 0s - loss: 0.0057 - accuracy: 1.00 - 0s 136us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1222/2000
191/191 [==============================] - 0s 136us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1223/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1224/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1225/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1226/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1227/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1228/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01228: ReduceLROnPlateau reducing learning rate to 1.88079056895209e-39.
Epoch 1229/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1230/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1231/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1232/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1233/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1234/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1235/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1236/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1237/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1238/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01238: ReduceLROnPlateau reducing learning rate to 9.40395284476045e-40.
Epoch 1239/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1240/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1241/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1242/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1243/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1244/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1245/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1246/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1247/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1248/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01248: ReduceLROnPlateau reducing learning rate to 4.701972919134064e-40.
Epoch 1249/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1250/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1251/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1252/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1253/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1254/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1255/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1256/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1257/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1258/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01258: ReduceLROnPlateau reducing learning rate to 2.350986459567032e-40.
Epoch 1259/2000
191/191 [==============================] - 0s 131us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1260/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1261/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1262/2000
191/191 [==============================] - ETA: 0s - loss: 0.0072 - accuracy: 1.00 - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1263/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1264/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1265/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1266/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1267/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1268/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01268: ReduceLROnPlateau reducing learning rate to 1.175493229783516e-40.
Epoch 1269/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1270/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1271/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1272/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1273/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1274/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1275/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1276/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1277/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1278/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01278: ReduceLROnPlateau reducing learning rate to 5.87746614891758e-41.
Epoch 1279/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1280/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1281/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1282/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1283/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1284/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1285/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1286/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1287/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1288/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01288: ReduceLROnPlateau reducing learning rate to 2.93873307445879e-41.
Epoch 1289/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1290/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1291/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1292/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1293/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1294/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1295/2000
191/191 [==============================] - ETA: 0s - loss: 0.0085 - accuracy: 1.00 - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1296/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1297/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1298/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01298: ReduceLROnPlateau reducing learning rate to 1.4694015696910032e-41.
Epoch 1299/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1300/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1301/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1302/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1303/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1304/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1305/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1306/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1307/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1308/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01308: ReduceLROnPlateau reducing learning rate to 7.347007848455016e-42.
Epoch 1309/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1310/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1311/2000
191/191 [==============================] - 0s 136us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1312/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1313/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1314/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1315/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1316/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1317/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1318/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01318: ReduceLROnPlateau reducing learning rate to 3.673503924227508e-42.
Epoch 1319/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1320/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1321/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1322/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1323/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1324/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1325/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1326/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1327/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1328/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01328: ReduceLROnPlateau reducing learning rate to 1.8371022867298352e-42.
Epoch 1329/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1330/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1331/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1332/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1333/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1334/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1335/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1336/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1337/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1338/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01338: ReduceLROnPlateau reducing learning rate to 9.185511433649176e-43.
Epoch 1339/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1340/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1341/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1342/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1343/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1344/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1345/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1346/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1347/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1348/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01348: ReduceLROnPlateau reducing learning rate to 4.5962589629854e-43.
Epoch 1349/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1350/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1351/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1352/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1353/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1354/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1355/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1356/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1357/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1358/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01358: ReduceLROnPlateau reducing learning rate to 2.2981294814927e-43.
Epoch 1359/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1360/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1361/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1362/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1363/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1364/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1365/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1366/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1367/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1368/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01368: ReduceLROnPlateau reducing learning rate to 1.14906474074635e-43.
Epoch 1369/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1370/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1371/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1372/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1373/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1374/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1375/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1376/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1377/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1378/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01378: ReduceLROnPlateau reducing learning rate to 5.74532370373175e-44.
Epoch 1379/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1380/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1381/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1382/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1383/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1384/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1385/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1386/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1387/2000
191/191 [==============================] - ETA: 0s - loss: 0.0044 - accuracy: 1.00 - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1388/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01388: ReduceLROnPlateau reducing learning rate to 2.872661851865875e-44.
Epoch 1389/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1390/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1391/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1392/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1393/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1394/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1395/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1396/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1397/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1398/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01398: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-44.
Epoch 1399/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1400/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1401/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1402/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1403/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1404/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1405/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1406/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1407/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1408/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01408: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-45.
Epoch 1409/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1410/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1411/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1412/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1413/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1414/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1415/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1416/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1417/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1418/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01418: ReduceLROnPlateau reducing learning rate to 3.5032461608120427e-45.
Epoch 1419/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1420/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1421/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1422/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1423/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1424/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1425/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1426/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1427/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1428/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01428: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-45.
Epoch 1429/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1430/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1431/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1432/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1433/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1434/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1435/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1436/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1437/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1438/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344

Epoch 01438: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-46.
Epoch 1439/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1440/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1441/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1442/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1443/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1444/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1445/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1446/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1447/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1448/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1449/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1450/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1451/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1452/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1453/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1454/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1455/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1456/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1457/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1458/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1459/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1460/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1461/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1462/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1463/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1464/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1465/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1466/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1467/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1468/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1469/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1470/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1471/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1472/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1473/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1474/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1475/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1476/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1477/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1478/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1479/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1480/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1481/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1482/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1483/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1484/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1485/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1486/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1487/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1488/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1489/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1490/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1491/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1492/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1493/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1494/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1495/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1496/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1497/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1498/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1499/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1500/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1501/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1502/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1503/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1504/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1505/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1506/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1507/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1508/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1509/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1510/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1511/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1512/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1513/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1514/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1515/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1516/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1517/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1518/2000
191/191 [==============================] - 0s 131us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1519/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1520/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1521/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1522/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1523/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1524/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1525/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1526/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1527/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1528/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1529/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1530/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1531/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1532/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1533/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1534/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1535/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1536/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1537/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1538/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1539/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1540/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1541/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1542/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1543/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1544/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1545/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1546/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1547/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1548/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1549/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1550/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1551/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1552/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1553/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1554/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1555/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1556/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1557/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1558/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1559/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1560/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1561/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1562/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1563/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1564/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1565/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1566/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1567/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1568/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1569/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1570/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1571/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1572/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1573/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1574/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1575/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1576/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1577/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1578/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1579/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1580/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1581/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1582/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1583/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1584/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1585/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1586/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1587/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1588/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1589/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1590/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1591/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1592/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1593/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1594/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1595/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1596/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1597/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1598/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1599/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1600/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1601/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1602/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1603/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1604/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1605/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1606/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1607/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1608/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1609/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1610/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1611/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1612/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1613/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1614/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1615/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1616/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1617/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1618/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1619/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1620/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1621/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1622/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1623/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1624/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1625/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1626/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1627/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1628/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1629/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1630/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1631/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1632/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1633/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1634/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1635/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1636/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1637/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1638/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1639/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1640/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1641/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1642/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1643/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1644/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1645/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1646/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1647/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1648/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1649/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1650/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1651/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1652/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1653/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1654/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1655/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1656/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1657/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1658/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1659/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1660/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1661/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1662/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1663/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1664/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1665/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1666/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1667/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1668/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1669/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1670/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1671/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1672/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1673/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1674/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1675/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1676/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1677/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1678/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1679/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1680/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1681/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1682/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1683/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1684/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1685/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1686/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1687/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1688/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1689/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1690/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1691/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1692/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1693/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1694/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1695/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1696/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1697/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1698/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1699/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1700/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1701/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1702/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1703/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1704/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1705/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1706/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1707/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1708/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1709/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1710/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1711/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1712/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1713/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1714/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1715/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1716/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1717/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1718/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1719/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1720/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1721/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1722/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1723/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1724/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1725/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1726/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1727/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1728/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1729/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1730/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1731/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1732/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1733/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1734/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1735/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1736/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1737/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1738/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1739/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1740/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1741/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1742/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1743/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1744/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1745/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1746/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1747/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1748/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1749/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1750/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1751/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1752/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1753/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1754/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1755/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1756/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1757/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1758/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1759/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1760/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1761/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1762/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1763/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1764/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1765/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1766/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1767/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1768/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1769/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1770/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1771/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1772/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1773/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1774/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1775/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1776/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1777/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1778/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1779/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1780/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1781/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1782/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1783/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1784/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1785/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1786/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1787/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1788/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1789/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1790/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1791/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1792/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1793/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1794/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1795/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1796/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1797/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1798/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1799/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1800/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1801/2000
191/191 [==============================] - ETA: 0s - loss: 0.0076 - accuracy: 1.00 - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1802/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1803/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1804/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1805/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1806/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1807/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1808/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1809/2000
191/191 [==============================] - 0s 199us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1810/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1811/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1812/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1813/2000
191/191 [==============================] - 0s 79us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1814/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1815/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1816/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1817/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1818/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1819/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1820/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1821/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1822/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1823/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1824/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1825/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1826/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1827/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1828/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1829/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1830/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1831/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1832/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1833/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1834/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1835/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1836/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1837/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1838/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1839/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1840/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1841/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1842/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1843/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1844/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1845/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1846/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1847/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1848/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1849/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1850/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1851/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1852/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1853/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1854/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1855/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1856/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1857/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1858/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1859/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1860/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1861/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1862/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1863/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1864/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1865/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1866/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1867/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1868/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1869/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1870/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1871/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1872/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1873/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1874/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1875/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1876/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1877/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1878/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1879/2000
191/191 [==============================] - 0s 73us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1880/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1881/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1882/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1883/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1884/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1885/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1886/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1887/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1888/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1889/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1890/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1891/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1892/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1893/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1894/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1895/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1896/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1897/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1898/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1899/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1900/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1901/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1902/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1903/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1904/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1905/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1906/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1907/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1908/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1909/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1910/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1911/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1912/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1913/2000
191/191 [==============================] - ETA: 0s - loss: 0.0088 - accuracy: 1.00 - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1914/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1915/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1916/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1917/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1918/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1919/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1920/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1921/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1922/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1923/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1924/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1925/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1926/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1927/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1928/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1929/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1930/2000
191/191 [==============================] - 0s 89us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1931/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1932/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1933/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1934/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1935/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1936/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1937/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1938/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1939/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1940/2000
191/191 [==============================] - 0s 78us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1941/2000
191/191 [==============================] - 0s 84us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1942/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1943/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1944/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1945/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1946/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1947/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1948/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1949/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1950/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1951/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1952/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1953/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1954/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1955/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1956/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1957/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1958/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1959/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1960/2000
191/191 [==============================] - ETA: 0s - loss: 0.0078 - accuracy: 1.00 - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1961/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1962/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1963/2000
191/191 [==============================] - 0s 131us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1964/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1965/2000
191/191 [==============================] - 0s 173us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1966/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1967/2000
191/191 [==============================] - 0s 126us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1968/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1969/2000
191/191 [==============================] - 0s 120us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1970/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1971/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1972/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1973/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1974/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1975/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1976/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1977/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1978/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1979/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1980/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1981/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1982/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1983/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1984/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1985/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1986/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1987/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1988/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1989/2000
191/191 [==============================] - 0s 94us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1990/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1991/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1992/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1993/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1994/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1995/2000
191/191 [==============================] - 0s 99us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1996/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1997/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1998/2000
191/191 [==============================] - 0s 115us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 1999/2000
191/191 [==============================] - 0s 110us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
Epoch 2000/2000
191/191 [==============================] - 0s 105us/step - loss: 0.0053 - accuracy: 1.0000 - val_loss: 1.3282 - val_accuracy: 0.7344
In [203]:
# Plot the training curves recorded in the Keras History object.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))

# Accuracy: dots = training, solid line = validation.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

# Loss: training loss goes to ~0 while validation loss plateaus,
# i.e. the network memorises the training set (overfitting).
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 2000)
In [204]:
# Evaluate the trained network on the held-out test split.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
64/64 [==============================] - 0s 47us/step
test loss: 1.3282248377799988, test accuracy: 0.734375
In [205]:
# Score the raw predicted probabilities with ROC AUC (threshold-free metric).
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.8006362672322375
In [206]:
# Binarise the predicted probabilities at the conventional 0.5 threshold.
# The original mapped int(i >= 0.5) over the rows of predict()'s output,
# relying on int() of a length-1 array (deprecated NumPy behaviour);
# flatten explicitly instead, still producing a plain list of 0/1 ints.
y_pred = [int(p >= 0.5) for p in np.ravel(y_pred)]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.39420935412026725

KMeans

In [66]:
# Preview the standardised MFCC feature matrix (255 rows x 13 coefficients).
# Show only the first rows instead of dumping the whole frame.
X.head()
Out[66]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13
0 -0.339415 0.847773 0.497198 -0.389310 1.225458 1.947033 -0.736267 0.492219 0.576682 1.504697 -1.796460 0.724954 0.958600
1 0.587658 -1.195426 0.636375 0.199876 0.765321 0.061181 0.379367 -0.440867 0.232893 1.339920 0.110001 0.807525 0.815678
2 1.465595 -2.307943 0.354567 -0.058273 -1.298853 -0.811453 -1.551580 -3.934320 -1.079432 2.546130 1.421407 0.639359 0.199094
3 0.749403 -1.690498 -0.125200 -1.016135 0.825845 0.271444 -0.104786 -0.992141 0.049182 1.425948 -0.343269 -0.789558 -0.411898
4 -0.280577 0.393332 0.744917 2.411400 -0.777421 -0.420018 1.258355 -1.544565 -0.498071 0.421527 -0.632908 -0.056846 -0.072348
5 -0.158690 0.404891 -0.147920 -0.299241 -0.786974 0.697216 0.290501 0.019739 -1.468086 -0.346174 -0.086965 0.026492 1.019512
6 1.646777 0.772744 -1.425228 -0.562610 -1.556076 0.533289 -0.404271 1.676958 0.979516 0.415548 0.544719 0.433332 0.204271
7 1.124970 0.506236 0.738993 1.984485 -0.928706 -0.494097 -0.707105 -0.494778 -1.642929 0.207467 0.181382 2.431721 0.848697
8 0.920059 1.438862 -2.048354 1.503567 -2.801303 0.567132 -0.745441 0.569519 0.130917 1.965436 -0.034797 1.164878 0.074074
9 0.182544 0.310622 0.067722 0.870138 0.168366 0.682045 -0.191296 -0.144962 -0.630020 -0.284032 -0.315301 0.344841 0.495167
10 0.168663 0.389450 0.034360 1.213392 0.248437 0.870618 -0.460824 -0.174734 -0.710502 -0.228408 -0.265153 0.349416 0.584114
11 0.153010 -0.118336 0.639531 1.504522 0.937909 0.356048 -0.089987 -0.628522 0.064203 0.966049 0.403915 -0.943626 0.173874
12 0.132578 0.261966 -2.871493 -3.398160 -0.256458 1.596532 -0.358711 0.175955 -0.499075 0.949085 2.235525 -0.197712 -0.272366
13 1.094629 0.885150 -1.130672 -0.083270 0.672482 0.750453 -0.863949 0.140540 0.423312 -0.305155 -0.424905 0.318660 0.885900
14 0.771472 0.364448 -0.454696 0.434253 0.912699 0.745924 -0.073390 -0.406473 0.450765 0.323180 -0.458826 -0.132295 0.495454
15 0.677561 0.166795 0.746471 0.075191 0.867924 -1.621678 0.771146 -0.067286 0.557998 -0.093593 0.020233 -0.800013 -0.629188
16 -0.032353 1.227345 -0.188580 0.927210 0.016663 1.001867 -0.473811 0.782387 1.542760 -0.345478 -0.838104 -0.439443 1.179204
17 0.459031 1.258961 -0.329412 1.391790 -0.208888 1.059241 -1.245671 0.619153 0.245780 0.644548 -0.602629 -0.928581 0.739885
18 -0.359172 0.051214 -0.603962 0.778896 1.630471 1.802477 1.486205 -0.140738 -0.894366 0.736624 2.114721 1.078175 -0.965785
19 0.209859 -0.615399 -0.676895 0.735655 0.805509 -0.696793 1.073068 0.240429 -0.205934 -0.759693 0.672843 0.569482 -0.455391
20 0.127381 -0.265099 -0.258801 -0.127568 0.649447 0.244473 1.897421 -0.344616 -0.593159 0.065147 1.787607 1.219355 -0.171813
21 1.222717 0.409860 1.311826 0.703873 0.322062 0.305461 -0.522644 -0.750833 0.001767 0.017953 0.254329 -0.227762 -0.614790
22 1.173352 0.490500 0.742825 -0.028159 -0.272396 -0.502733 -0.759443 -1.031924 -0.157975 0.075659 0.604220 0.143298 -0.001849
23 1.069960 0.858822 -0.795544 0.076688 0.851875 0.735014 -0.758779 0.065595 0.532667 -0.391858 -0.497019 0.240822 0.848126
24 0.581377 -0.804045 0.399887 1.535671 0.245878 0.904192 -0.233991 -0.925983 0.212280 0.499535 -0.024926 -0.925999 1.294925
25 0.161110 0.025075 0.716318 1.532230 0.889883 0.353167 -0.058787 -0.593046 0.093773 0.927085 0.199691 -0.979872 0.232850
26 0.431443 0.442713 0.259120 0.045533 0.102675 0.367606 0.054320 0.942924 0.180609 0.550983 0.265291 0.321252 -0.830969
27 0.344525 -1.140315 -0.725453 -0.547965 0.449924 0.303904 1.053624 1.051712 0.509322 0.181611 -0.519979 -1.134490 -1.439105
28 -0.041565 0.671274 0.195143 0.247294 0.531620 1.050124 0.311358 0.988161 -0.198869 0.387795 1.757366 1.351684 0.194840
29 0.417845 -1.134173 -0.760709 -0.605264 0.077464 0.533333 1.104524 2.124971 0.083548 0.801730 0.092534 -1.281628 -1.468782
... ... ... ... ... ... ... ... ... ... ... ... ... ...
225 1.532114 -1.060006 -0.434145 -0.999435 -1.259462 0.039140 -0.802013 -0.655286 0.714448 1.005958 -0.086372 0.537392 0.054440
226 -0.942320 1.172080 0.506725 -0.230675 -0.104635 0.898742 -1.107001 -1.182148 -0.940991 0.232366 1.778224 0.975251 1.731084
227 1.421974 0.631029 -0.563813 -0.694595 -0.673270 0.929022 0.476907 -1.025173 -0.813644 -0.060006 -0.738730 -0.558099 0.057654
228 -1.473385 -0.806223 1.849423 -1.252541 0.941013 -0.872947 -1.812392 -0.242718 -0.097212 -0.510500 -0.232195 -0.546399 0.945530
229 -1.135926 -0.772372 1.164844 -1.022517 0.630202 -0.496999 -1.101656 -0.168921 -0.295159 -0.587401 0.369033 -0.266325 0.604469
230 -1.085049 0.879566 0.442593 0.128917 0.393498 0.531555 0.392194 1.418515 0.891015 -0.348926 -0.756201 -0.838584 -0.015971
231 -0.352258 0.556982 0.530520 0.443818 0.300921 0.032128 -0.797384 -0.573532 0.398084 0.328875 -0.274964 -1.300920 0.254456
232 -1.190363 0.797356 0.758472 0.587917 0.890540 0.471925 0.105793 0.680721 0.230834 -0.150709 -0.816744 -0.470618 0.371198
233 -0.651003 -0.586618 1.326854 -0.451354 0.507113 0.165474 -0.919675 -0.448249 -1.310940 -1.372737 0.406029 -1.414627 -0.434858
234 -1.459511 -0.516281 1.631699 -1.141842 0.584621 -0.458541 -1.428877 -0.934556 -0.216455 -0.049794 0.095580 0.387068 0.693730
235 -0.726984 0.702447 0.798069 -0.320660 0.530902 1.019988 0.144995 0.207847 0.039592 0.220761 0.762941 0.575034 0.671517
236 -0.300986 -0.404923 0.715406 0.245380 -0.427936 -0.334843 -0.228084 -0.330898 -0.674327 0.199560 0.827455 0.016433 0.866789
237 -0.736244 0.088611 0.910051 0.437100 0.258256 0.363828 -0.415290 -0.717445 -0.012727 0.436925 -0.786954 -1.217376 0.352825
238 0.610473 -2.664315 1.303652 -2.022376 1.500032 -1.280926 -1.249533 0.432111 -0.768558 0.291156 -0.092312 0.053770 -0.401166
239 -2.045424 -2.954642 0.302601 -0.868092 -1.038134 -1.230777 0.514329 0.057591 -1.023895 0.275395 -1.450282 0.386242 0.318763
240 0.329793 -1.367570 -1.454329 -0.207924 -0.723609 -0.149025 -0.085298 -0.011595 -0.240239 -0.009120 -0.325229 -0.025722 0.114182
241 -1.919591 1.382172 -0.134161 0.837967 -0.687780 0.944303 -0.258652 -0.742178 0.386031 -1.178099 -1.843543 -0.710556 -0.318561
242 -2.087669 1.400006 -0.494964 0.451717 -0.759188 0.736625 0.133121 -0.196031 1.121231 0.474128 -0.345937 -0.409324 -0.442069
243 -2.131652 0.439305 -0.612226 0.854126 -0.494550 0.825299 0.301373 -0.018964 0.690556 -0.078762 -0.709495 -0.075857 -0.418656
244 -1.611989 -0.756403 -0.410917 1.075909 0.297336 -1.317576 1.115011 -0.467065 -0.768378 1.615499 1.611125 -1.018782 -1.798744
245 -0.142010 0.000190 -0.063461 -0.506353 -0.386942 -0.256144 0.270621 -1.497417 0.507892 0.456828 -0.431169 -0.978417 0.015849
246 -1.263975 -1.168117 -1.396090 -0.312016 1.862268 1.400290 0.646060 -0.686864 0.418524 -0.069926 -0.653856 -0.853617 -0.106814
247 -0.507700 0.899825 1.510153 1.083642 2.081451 0.589016 0.901321 0.658808 0.152596 0.176442 -0.447633 0.287838 0.650479
248 -0.159768 0.518093 2.197018 0.698491 0.476336 -2.014255 -1.614667 -0.397282 -1.781932 -0.208894 1.650551 -0.771436 -0.987237
249 -1.037899 1.016712 2.774230 0.665468 -0.385673 0.587263 -0.121609 -0.331379 0.622484 -0.387131 -0.276584 0.218207 1.689216
250 -0.526923 -1.169944 0.474875 -0.789231 0.369827 -0.537003 -1.089843 -0.173366 -0.023237 -0.142334 0.740065 0.813114 0.872556
251 -0.770856 -1.024349 -0.019140 -0.097521 0.092703 0.369242 -0.273901 0.190740 -0.074032 0.113055 0.140291 -0.696275 0.166679
252 -0.905458 -0.790575 0.206164 -0.723816 -0.444860 0.107833 -0.734514 -0.533865 -0.634334 0.320526 0.088428 -0.348210 0.347201
253 -1.378235 -0.338405 0.016815 -0.394563 0.034043 1.023865 -0.303960 -1.316121 0.198697 0.670577 0.809574 0.580565 0.056004
254 -0.199959 -2.035812 -0.904507 -1.511975 -0.437843 0.262972 -1.943788 -1.963300 -2.256227 0.354369 -0.039829 0.882325 0.139307

255 rows × 13 columns

In [67]:
# Elbow method: within-cluster sum of squares (KMeans inertia) for k = 1..14.
WSSs = [
    KMeans(n_clusters=k, random_state=0).fit(X).inertia_
    for k in range(1, 15)
]
WSSs
Out[67]:
[3315.0,
 2972.7888695817974,
 2748.18187155972,
 2544.9420084212106,
 2413.687059384553,
 2278.037996783226,
 2213.3487507256823,
 2123.4282707474663,
 2067.8299633414163,
 1977.777252698108,
 1956.5229777214513,
 1880.0296166971755,
 1815.5096049846275,
 1785.9955747862728]
In [68]:
# Elbow plot: look for the "knee" where adding clusters stops paying off.
plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs, marker='o')
plt.title('Elbow method for KMeans on MFCC features')
plt.xlabel('Number of clusters k')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.show()
Out[68]:
[<matplotlib.lines.Line2D at 0x1e82ae84f98>]

K=6

In [69]:
# Final clustering of the MFCC features with k = 6, the elbow chosen
# from the WSS curve above. fit() returns the estimator itself, so the
# cell still displays the fitted KMeans repr.
kmeans_mfcc = KMeans(n_clusters=6, random_state=0, n_init=10).fit(X)
kmeans_mfcc
Out[69]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=6, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [70]:
# Cluster assignment (0-5) for each of the 255 rows the model was fitted on.
kmeans_mfcc.labels_
Out[70]:
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
       0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
       2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
       3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
       4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
       1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
       4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
       4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
       4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
       4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
       1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
       4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
In [71]:
# Cluster membership for every row of X. X is the same data the model was
# fitted on, so predict(X) just recomputes what fit() already stored in
# labels_ (the notebook outputs Out[70] and Out[71] are identical) --
# reuse the attribute and skip the redundant pass over the data.
clusters_mfcc = kmeans_mfcc.labels_
clusters_mfcc
Out[71]:
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
       0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
       2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
       3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
       4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
       1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
       4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
       4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
       4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
       4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
       1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
       4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
In [72]:
# Attach the cluster assignment and the target label to the feature frame
# so the two can be cross-tabulated below.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
In [73]:
# Preview the feature frame with the new 'Cluster' and 'chosen' columns;
# show only the first rows instead of dumping all 255.
X.head()
Out[73]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13 Cluster chosen
0 -0.339415 0.847773 0.497198 -0.389310 1.225458 1.947033 -0.736267 0.492219 0.576682 1.504697 -1.796460 0.724954 0.958600 4 0
1 0.587658 -1.195426 0.636375 0.199876 0.765321 0.061181 0.379367 -0.440867 0.232893 1.339920 0.110001 0.807525 0.815678 2 0
2 1.465595 -2.307943 0.354567 -0.058273 -1.298853 -0.811453 -1.551580 -3.934320 -1.079432 2.546130 1.421407 0.639359 0.199094 2 0
3 0.749403 -1.690498 -0.125200 -1.016135 0.825845 0.271444 -0.104786 -0.992141 0.049182 1.425948 -0.343269 -0.789558 -0.411898 2 0
4 -0.280577 0.393332 0.744917 2.411400 -0.777421 -0.420018 1.258355 -1.544565 -0.498071 0.421527 -0.632908 -0.056846 -0.072348 0 0
5 -0.158690 0.404891 -0.147920 -0.299241 -0.786974 0.697216 0.290501 0.019739 -1.468086 -0.346174 -0.086965 0.026492 1.019512 1 0
6 1.646777 0.772744 -1.425228 -0.562610 -1.556076 0.533289 -0.404271 1.676958 0.979516 0.415548 0.544719 0.433332 0.204271 1 0
7 1.124970 0.506236 0.738993 1.984485 -0.928706 -0.494097 -0.707105 -0.494778 -1.642929 0.207467 0.181382 2.431721 0.848697 0 0
8 0.920059 1.438862 -2.048354 1.503567 -2.801303 0.567132 -0.745441 0.569519 0.130917 1.965436 -0.034797 1.164878 0.074074 1 0
9 0.182544 0.310622 0.067722 0.870138 0.168366 0.682045 -0.191296 -0.144962 -0.630020 -0.284032 -0.315301 0.344841 0.495167 4 0
10 0.168663 0.389450 0.034360 1.213392 0.248437 0.870618 -0.460824 -0.174734 -0.710502 -0.228408 -0.265153 0.349416 0.584114 4 0
11 0.153010 -0.118336 0.639531 1.504522 0.937909 0.356048 -0.089987 -0.628522 0.064203 0.966049 0.403915 -0.943626 0.173874 4 0
12 0.132578 0.261966 -2.871493 -3.398160 -0.256458 1.596532 -0.358711 0.175955 -0.499075 0.949085 2.235525 -0.197712 -0.272366 1 0
13 1.094629 0.885150 -1.130672 -0.083270 0.672482 0.750453 -0.863949 0.140540 0.423312 -0.305155 -0.424905 0.318660 0.885900 4 0
14 0.771472 0.364448 -0.454696 0.434253 0.912699 0.745924 -0.073390 -0.406473 0.450765 0.323180 -0.458826 -0.132295 0.495454 4 0
15 0.677561 0.166795 0.746471 0.075191 0.867924 -1.621678 0.771146 -0.067286 0.557998 -0.093593 0.020233 -0.800013 -0.629188 3 0
16 -0.032353 1.227345 -0.188580 0.927210 0.016663 1.001867 -0.473811 0.782387 1.542760 -0.345478 -0.838104 -0.439443 1.179204 4 0
17 0.459031 1.258961 -0.329412 1.391790 -0.208888 1.059241 -1.245671 0.619153 0.245780 0.644548 -0.602629 -0.928581 0.739885 4 0
18 -0.359172 0.051214 -0.603962 0.778896 1.630471 1.802477 1.486205 -0.140738 -0.894366 0.736624 2.114721 1.078175 -0.965785 4 0
19 0.209859 -0.615399 -0.676895 0.735655 0.805509 -0.696793 1.073068 0.240429 -0.205934 -0.759693 0.672843 0.569482 -0.455391 4 0
20 0.127381 -0.265099 -0.258801 -0.127568 0.649447 0.244473 1.897421 -0.344616 -0.593159 0.065147 1.787607 1.219355 -0.171813 4 0
21 1.222717 0.409860 1.311826 0.703873 0.322062 0.305461 -0.522644 -0.750833 0.001767 0.017953 0.254329 -0.227762 -0.614790 0 0
22 1.173352 0.490500 0.742825 -0.028159 -0.272396 -0.502733 -0.759443 -1.031924 -0.157975 0.075659 0.604220 0.143298 -0.001849 0 0
23 1.069960 0.858822 -0.795544 0.076688 0.851875 0.735014 -0.758779 0.065595 0.532667 -0.391858 -0.497019 0.240822 0.848126 4 0
24 0.581377 -0.804045 0.399887 1.535671 0.245878 0.904192 -0.233991 -0.925983 0.212280 0.499535 -0.024926 -0.925999 1.294925 4 0
25 0.161110 0.025075 0.716318 1.532230 0.889883 0.353167 -0.058787 -0.593046 0.093773 0.927085 0.199691 -0.979872 0.232850 4 0
26 0.431443 0.442713 0.259120 0.045533 0.102675 0.367606 0.054320 0.942924 0.180609 0.550983 0.265291 0.321252 -0.830969 4 0
27 0.344525 -1.140315 -0.725453 -0.547965 0.449924 0.303904 1.053624 1.051712 0.509322 0.181611 -0.519979 -1.134490 -1.439105 3 0
28 -0.041565 0.671274 0.195143 0.247294 0.531620 1.050124 0.311358 0.988161 -0.198869 0.387795 1.757366 1.351684 0.194840 4 0
29 0.417845 -1.134173 -0.760709 -0.605264 0.077464 0.533333 1.104524 2.124971 0.083548 0.801730 0.092534 -1.281628 -1.468782 3 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
225 1.532114 -1.060006 -0.434145 -0.999435 -1.259462 0.039140 -0.802013 -0.655286 0.714448 1.005958 -0.086372 0.537392 0.054440 1 1
226 -0.942320 1.172080 0.506725 -0.230675 -0.104635 0.898742 -1.107001 -1.182148 -0.940991 0.232366 1.778224 0.975251 1.731084 0 1
227 1.421974 0.631029 -0.563813 -0.694595 -0.673270 0.929022 0.476907 -1.025173 -0.813644 -0.060006 -0.738730 -0.558099 0.057654 1 1
228 -1.473385 -0.806223 1.849423 -1.252541 0.941013 -0.872947 -1.812392 -0.242718 -0.097212 -0.510500 -0.232195 -0.546399 0.945530 2 1
229 -1.135926 -0.772372 1.164844 -1.022517 0.630202 -0.496999 -1.101656 -0.168921 -0.295159 -0.587401 0.369033 -0.266325 0.604469 2 1
230 -1.085049 0.879566 0.442593 0.128917 0.393498 0.531555 0.392194 1.418515 0.891015 -0.348926 -0.756201 -0.838584 -0.015971 3 1
231 -0.352258 0.556982 0.530520 0.443818 0.300921 0.032128 -0.797384 -0.573532 0.398084 0.328875 -0.274964 -1.300920 0.254456 0 1
232 -1.190363 0.797356 0.758472 0.587917 0.890540 0.471925 0.105793 0.680721 0.230834 -0.150709 -0.816744 -0.470618 0.371198 4 1
233 -0.651003 -0.586618 1.326854 -0.451354 0.507113 0.165474 -0.919675 -0.448249 -1.310940 -1.372737 0.406029 -1.414627 -0.434858 2 1
234 -1.459511 -0.516281 1.631699 -1.141842 0.584621 -0.458541 -1.428877 -0.934556 -0.216455 -0.049794 0.095580 0.387068 0.693730 2 1
235 -0.726984 0.702447 0.798069 -0.320660 0.530902 1.019988 0.144995 0.207847 0.039592 0.220761 0.762941 0.575034 0.671517 4 1
236 -0.300986 -0.404923 0.715406 0.245380 -0.427936 -0.334843 -0.228084 -0.330898 -0.674327 0.199560 0.827455 0.016433 0.866789 0 1
237 -0.736244 0.088611 0.910051 0.437100 0.258256 0.363828 -0.415290 -0.717445 -0.012727 0.436925 -0.786954 -1.217376 0.352825 2 1
238 0.610473 -2.664315 1.303652 -2.022376 1.500032 -1.280926 -1.249533 0.432111 -0.768558 0.291156 -0.092312 0.053770 -0.401166 2 1
239 -2.045424 -2.954642 0.302601 -0.868092 -1.038134 -1.230777 0.514329 0.057591 -1.023895 0.275395 -1.450282 0.386242 0.318763 2 1
240 0.329793 -1.367570 -1.454329 -0.207924 -0.723609 -0.149025 -0.085298 -0.011595 -0.240239 -0.009120 -0.325229 -0.025722 0.114182 1 1
241 -1.919591 1.382172 -0.134161 0.837967 -0.687780 0.944303 -0.258652 -0.742178 0.386031 -1.178099 -1.843543 -0.710556 -0.318561 3 1
242 -2.087669 1.400006 -0.494964 0.451717 -0.759188 0.736625 0.133121 -0.196031 1.121231 0.474128 -0.345937 -0.409324 -0.442069 4 1
243 -2.131652 0.439305 -0.612226 0.854126 -0.494550 0.825299 0.301373 -0.018964 0.690556 -0.078762 -0.709495 -0.075857 -0.418656 4 1
244 -1.611989 -0.756403 -0.410917 1.075909 0.297336 -1.317576 1.115011 -0.467065 -0.768378 1.615499 1.611125 -1.018782 -1.798744 2 1
245 -0.142010 0.000190 -0.063461 -0.506353 -0.386942 -0.256144 0.270621 -1.497417 0.507892 0.456828 -0.431169 -0.978417 0.015849 2 1
246 -1.263975 -1.168117 -1.396090 -0.312016 1.862268 1.400290 0.646060 -0.686864 0.418524 -0.069926 -0.653856 -0.853617 -0.106814 2 1
247 -0.507700 0.899825 1.510153 1.083642 2.081451 0.589016 0.901321 0.658808 0.152596 0.176442 -0.447633 0.287838 0.650479 4 1
248 -0.159768 0.518093 2.197018 0.698491 0.476336 -2.014255 -1.614667 -0.397282 -1.781932 -0.208894 1.650551 -0.771436 -0.987237 0 1
249 -1.037899 1.016712 2.774230 0.665468 -0.385673 0.587263 -0.121609 -0.331379 0.622484 -0.387131 -0.276584 0.218207 1.689216 0 1
250 -0.526923 -1.169944 0.474875 -0.789231 0.369827 -0.537003 -1.089843 -0.173366 -0.023237 -0.142334 0.740065 0.813114 0.872556 2 1
251 -0.770856 -1.024349 -0.019140 -0.097521 0.092703 0.369242 -0.273901 0.190740 -0.074032 0.113055 0.140291 -0.696275 0.166679 2 1
252 -0.905458 -0.790575 0.206164 -0.723816 -0.444860 0.107833 -0.734514 -0.533865 -0.634334 0.320526 0.088428 -0.348210 0.347201 2 1
253 -1.378235 -0.338405 0.016815 -0.394563 0.034043 1.023865 -0.303960 -1.316121 0.198697 0.670577 0.809574 0.580565 0.056004 2 1
254 -0.199959 -2.035812 -0.904507 -1.511975 -0.437843 0.262972 -1.943788 -1.963300 -2.256227 0.354369 -0.039829 0.882325 0.139307 2 1

255 rows × 15 columns

In [74]:
# Cross-tabulate cluster vs. chosen and draw a stacked bar chart: how the
# selected (1) and unselected (0) tracks distribute over the six clusters.
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
ax = pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
ax.set_xlabel('Cluster')
ax.set_ylabel('Number of tracks')
ax.set_title('Chosen vs. not chosen tracks per MFCC cluster');
Out[74]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e82aec1940>
In [207]:
from IPython.display import display, Markdown, Latex

# Render the current company's name as a level-2 markdown heading.
heading = Markdown('## {}'.format(companies[2]))
display(heading)

Gramma

ANN

In [208]:
# Feature matrix for company index 2 — presumably the standardized MFCC
# features per track (13 columns, per the shape check below). TODO confirm
# against the cell where df_n_ps_std_mfcc is built.
X = df_n_ps_std_mfcc[2]
In [209]:
# Binary target: whether each track was 'chosen' for this company's playlist.
y = df_n_ps[2]['chosen']
In [210]:
# Hold out a test set. A fixed random_state makes the split — and therefore
# every accuracy/kappa figure reported below — reproducible under
# Restart & Run All (the original call used an unseeded split).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234)
In [211]:
X_train.shape  # sanity check: (n_train_samples, n_features) of the training split
Out[211]:
(231, 13)
In [80]:
# Base estimator for the grid search (architecture is overridden by the grid
# anyway). random_state pins the weight initialization so fits are reproducible
# on their own — the original relied on np.random.seed() being executed in a
# *later* cell, which breaks on out-of-order execution.
mlp = MLPClassifier(hidden_layer_sizes=(30, 30, 30), random_state=1234)
In [81]:
# Hyper-parameter search space for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
# One-, two- and three-layer architectures built from 10/20/30-unit layers.
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
# Initial learning rates: 0.001 … 0.009 in 0.001 steps, then 0.01 and 0.02.
learning_rate_init_vec = [round(k / 1000, 3) for k in range(1, 10)] + [0.01, 0.02]
# Batch sizes (defined but currently unused — see the commented-out grid entry).
batch_size_vec = [10, 20] + list(range(40, 101, 20)) + [150]
In [82]:
import time
start = time.time()  # wall-clock reference to time the grid search

np.random.seed(1234)  # seed the global RNG so the stochastic MLP fits are repeatable
parametros = {'activation': activation_vec,
              'max_iter': max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track Cohen's kappa alongside accuracy; refit the final model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE: the deprecated `iid=True` argument was dropped — it was deprecated in
# scikit-learn 0.22 and removed in 0.24, so the original call fails on current
# versions. The search now uses the standard (unweighted) mean CV score.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1)
In [83]:
# Run the exhaustive grid search (5-fold CV over all parameter combinations),
# then report the best configuration, its accuracy, its kappa, and the runtime.
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # timestamp taken right after the search finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30,), 'learning_rate_init': 0.001, 'max_iter': 1000}, que permiten obtener un Accuracy de 84.42% y un Kappa del 45.84
Tiempo total: 27.81 minutos
In [212]:
n0 = X_train.shape[1]  # input dimensionality (number of feature columns)
# Manually override the grid-search result with a single 30-unit hidden layer —
# this matches the best architecture reported above, (30,). NOTE(review): this
# clobbers grid.best_params_ in place; any later cell reading it sees [30].
grid.best_params_['hidden_layer_sizes'] = [30]
### hidden_layer_sizes
# Layer widths for the Keras model: the hidden layers, plus 1 sigmoid output unit.
# (list(...) replaces the original element-by-element copy loop.)
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = 0.001       # initial learning rate (matches the grid-search winner)
epochs = 1000    # upper bound; ReduceLROnPlateau shrinks the LR along the way
In [213]:
# Keras functional-API entry point: one sample = n0 features.
input_tensor = Input(shape = (n0,))
In [214]:
# Functional chain: input -> tanh hidden layers (widths ns[:-1]) -> sigmoid output.
# NOTE(review): the grid search above selected activation='relu', yet 'tanh' is
# used here — kept as-is to preserve the original behavior; confirm intent.
hidden_outputs = [input_tensor]
prev_output = input_tensor
for width in ns[:-1]:
    prev_output = Dense(width, activation = 'tanh')(prev_output)
    hidden_outputs.append(prev_output)

classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [215]:
# Assemble the functional graph into a Model, and snapshot the freshly
# initialized weights so training can later be restarted from this exact state.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [216]:
model.summary()  # architecture / parameter-count sanity check (13 -> 30 -> 1)
Model: "model_8"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_8 (InputLayer)         (None, 13)                0         
_________________________________________________________________
dense_23 (Dense)             (None, 30)                420       
_________________________________________________________________
dense_24 (Dense)             (None, 1)                 31        
=================================================================
Total params: 451
Trainable params: 451
Non-trainable params: 0
_________________________________________________________________
In [217]:
# Reset to the saved initial weights so re-running this cell always trains
# from the same starting point (the cell is idempotent).
model.set_weights(weights)
# `learning_rate` replaces the deprecated `lr` keyword, which newer Keras
# releases reject outright.
adam = keras.optimizers.Adam(learning_rate=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train with the test split as validation; halve the learning rate whenever
# val_accuracy fails to improve by >= 0.01 for 10 consecutive epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 231 samples, validate on 78 samples
Epoch 1/1000
231/231 [==============================] - 0s 653us/step - loss: 0.6856 - accuracy: 0.5628 - val_loss: 0.6899 - val_accuracy: 0.5769
Epoch 2/1000
231/231 [==============================] - 0s 52us/step - loss: 0.6619 - accuracy: 0.6147 - val_loss: 0.6783 - val_accuracy: 0.6154
Epoch 3/1000
231/231 [==============================] - 0s 52us/step - loss: 0.6436 - accuracy: 0.6623 - val_loss: 0.6683 - val_accuracy: 0.6026
Epoch 4/1000
231/231 [==============================] - 0s 82us/step - loss: 0.6261 - accuracy: 0.6840 - val_loss: 0.6598 - val_accuracy: 0.6154
Epoch 5/1000
231/231 [==============================] - 0s 56us/step - loss: 0.6105 - accuracy: 0.7273 - val_loss: 0.6508 - val_accuracy: 0.6282
Epoch 6/1000
231/231 [==============================] - 0s 56us/step - loss: 0.5965 - accuracy: 0.7489 - val_loss: 0.6424 - val_accuracy: 0.6154
Epoch 7/1000
231/231 [==============================] - 0s 56us/step - loss: 0.5832 - accuracy: 0.7576 - val_loss: 0.6345 - val_accuracy: 0.6410
Epoch 8/1000
231/231 [==============================] - 0s 52us/step - loss: 0.5711 - accuracy: 0.7532 - val_loss: 0.6278 - val_accuracy: 0.6667
Epoch 9/1000
231/231 [==============================] - 0s 52us/step - loss: 0.5596 - accuracy: 0.7446 - val_loss: 0.6198 - val_accuracy: 0.6667
Epoch 10/1000
231/231 [==============================] - 0s 56us/step - loss: 0.5479 - accuracy: 0.7619 - val_loss: 0.6118 - val_accuracy: 0.6923
Epoch 11/1000
231/231 [==============================] - 0s 65us/step - loss: 0.5373 - accuracy: 0.7662 - val_loss: 0.6053 - val_accuracy: 0.6923
Epoch 12/1000
231/231 [==============================] - 0s 56us/step - loss: 0.5275 - accuracy: 0.7706 - val_loss: 0.5994 - val_accuracy: 0.7051
Epoch 13/1000
231/231 [==============================] - 0s 69us/step - loss: 0.5175 - accuracy: 0.7749 - val_loss: 0.5939 - val_accuracy: 0.7179
Epoch 14/1000
231/231 [==============================] - 0s 69us/step - loss: 0.5082 - accuracy: 0.7922 - val_loss: 0.5909 - val_accuracy: 0.7051
Epoch 15/1000
231/231 [==============================] - 0s 82us/step - loss: 0.4998 - accuracy: 0.8009 - val_loss: 0.5871 - val_accuracy: 0.6795
Epoch 16/1000
231/231 [==============================] - 0s 95us/step - loss: 0.4915 - accuracy: 0.8052 - val_loss: 0.5853 - val_accuracy: 0.7051
Epoch 17/1000
231/231 [==============================] - 0s 104us/step - loss: 0.4826 - accuracy: 0.8052 - val_loss: 0.5799 - val_accuracy: 0.7179
Epoch 18/1000
231/231 [==============================] - 0s 91us/step - loss: 0.4751 - accuracy: 0.8095 - val_loss: 0.5749 - val_accuracy: 0.7179
Epoch 19/1000
231/231 [==============================] - 0s 91us/step - loss: 0.4677 - accuracy: 0.8052 - val_loss: 0.5729 - val_accuracy: 0.7179
Epoch 20/1000
231/231 [==============================] - 0s 78us/step - loss: 0.4604 - accuracy: 0.8052 - val_loss: 0.5686 - val_accuracy: 0.7179
Epoch 21/1000
231/231 [==============================] - 0s 87us/step - loss: 0.4529 - accuracy: 0.8139 - val_loss: 0.5653 - val_accuracy: 0.7308
Epoch 22/1000
231/231 [==============================] - 0s 82us/step - loss: 0.4467 - accuracy: 0.8268 - val_loss: 0.5641 - val_accuracy: 0.7179
Epoch 23/1000
231/231 [==============================] - 0s 65us/step - loss: 0.4392 - accuracy: 0.8398 - val_loss: 0.5616 - val_accuracy: 0.6923
Epoch 24/1000
231/231 [==============================] - 0s 56us/step - loss: 0.4332 - accuracy: 0.8571 - val_loss: 0.5597 - val_accuracy: 0.6923
Epoch 25/1000
231/231 [==============================] - 0s 61us/step - loss: 0.4273 - accuracy: 0.8571 - val_loss: 0.5587 - val_accuracy: 0.7051
Epoch 26/1000
231/231 [==============================] - 0s 69us/step - loss: 0.4217 - accuracy: 0.8571 - val_loss: 0.5558 - val_accuracy: 0.7051
Epoch 27/1000
231/231 [==============================] - 0s 65us/step - loss: 0.4169 - accuracy: 0.8615 - val_loss: 0.5539 - val_accuracy: 0.7051
Epoch 28/1000
231/231 [==============================] - 0s 56us/step - loss: 0.4122 - accuracy: 0.8485 - val_loss: 0.5520 - val_accuracy: 0.7179
Epoch 29/1000
231/231 [==============================] - 0s 56us/step - loss: 0.4079 - accuracy: 0.8571 - val_loss: 0.5550 - val_accuracy: 0.7051
Epoch 30/1000
231/231 [==============================] - 0s 61us/step - loss: 0.4040 - accuracy: 0.8571 - val_loss: 0.5576 - val_accuracy: 0.6923
Epoch 31/1000
231/231 [==============================] - 0s 56us/step - loss: 0.4000 - accuracy: 0.8615 - val_loss: 0.5584 - val_accuracy: 0.6923

Epoch 00031: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 32/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3969 - accuracy: 0.8571 - val_loss: 0.5593 - val_accuracy: 0.6923
Epoch 33/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3953 - accuracy: 0.8615 - val_loss: 0.5589 - val_accuracy: 0.6923
Epoch 34/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3937 - accuracy: 0.8615 - val_loss: 0.5598 - val_accuracy: 0.6923
Epoch 35/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3923 - accuracy: 0.8571 - val_loss: 0.5604 - val_accuracy: 0.6923
Epoch 36/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3907 - accuracy: 0.8571 - val_loss: 0.5606 - val_accuracy: 0.6923
Epoch 37/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3892 - accuracy: 0.8528 - val_loss: 0.5614 - val_accuracy: 0.6923
Epoch 38/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3876 - accuracy: 0.8528 - val_loss: 0.5614 - val_accuracy: 0.6923
Epoch 39/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3861 - accuracy: 0.8485 - val_loss: 0.5616 - val_accuracy: 0.6923
Epoch 40/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3845 - accuracy: 0.8485 - val_loss: 0.5615 - val_accuracy: 0.6923
Epoch 41/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3831 - accuracy: 0.8528 - val_loss: 0.5625 - val_accuracy: 0.6795

Epoch 00041: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 42/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3820 - accuracy: 0.8528 - val_loss: 0.5632 - val_accuracy: 0.6795
Epoch 43/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3814 - accuracy: 0.8528 - val_loss: 0.5635 - val_accuracy: 0.6795
Epoch 44/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3807 - accuracy: 0.8528 - val_loss: 0.5641 - val_accuracy: 0.6795
Epoch 45/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3802 - accuracy: 0.8571 - val_loss: 0.5642 - val_accuracy: 0.6923
Epoch 46/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3795 - accuracy: 0.8571 - val_loss: 0.5645 - val_accuracy: 0.6923
Epoch 47/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3789 - accuracy: 0.8571 - val_loss: 0.5648 - val_accuracy: 0.6923
Epoch 48/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3783 - accuracy: 0.8571 - val_loss: 0.5647 - val_accuracy: 0.7051
Epoch 49/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3776 - accuracy: 0.8571 - val_loss: 0.5655 - val_accuracy: 0.6923
Epoch 50/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3770 - accuracy: 0.8571 - val_loss: 0.5662 - val_accuracy: 0.6923
Epoch 51/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3764 - accuracy: 0.8571 - val_loss: 0.5668 - val_accuracy: 0.6923

Epoch 00051: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 52/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3759 - accuracy: 0.8571 - val_loss: 0.5670 - val_accuracy: 0.6923
Epoch 53/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3756 - accuracy: 0.8571 - val_loss: 0.5673 - val_accuracy: 0.6923
Epoch 54/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3753 - accuracy: 0.8571 - val_loss: 0.5675 - val_accuracy: 0.6923
Epoch 55/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3750 - accuracy: 0.8571 - val_loss: 0.5673 - val_accuracy: 0.6923
Epoch 56/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3747 - accuracy: 0.8571 - val_loss: 0.5673 - val_accuracy: 0.6923
Epoch 57/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3745 - accuracy: 0.8571 - val_loss: 0.5674 - val_accuracy: 0.6923
Epoch 58/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3742 - accuracy: 0.8528 - val_loss: 0.5674 - val_accuracy: 0.6923
Epoch 59/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3740 - accuracy: 0.8571 - val_loss: 0.5677 - val_accuracy: 0.6923
Epoch 60/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3736 - accuracy: 0.8528 - val_loss: 0.5680 - val_accuracy: 0.6923
Epoch 61/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3734 - accuracy: 0.8571 - val_loss: 0.5679 - val_accuracy: 0.6923

Epoch 00061: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 62/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3732 - accuracy: 0.8571 - val_loss: 0.5679 - val_accuracy: 0.6923
Epoch 63/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3731 - accuracy: 0.8571 - val_loss: 0.5680 - val_accuracy: 0.6923
Epoch 64/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3730 - accuracy: 0.8571 - val_loss: 0.5681 - val_accuracy: 0.6923
Epoch 65/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3728 - accuracy: 0.8571 - val_loss: 0.5680 - val_accuracy: 0.6923
Epoch 66/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3727 - accuracy: 0.8571 - val_loss: 0.5682 - val_accuracy: 0.6923
Epoch 67/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3726 - accuracy: 0.8571 - val_loss: 0.5684 - val_accuracy: 0.6923
Epoch 68/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3724 - accuracy: 0.8571 - val_loss: 0.5686 - val_accuracy: 0.6923
Epoch 69/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3723 - accuracy: 0.8571 - val_loss: 0.5686 - val_accuracy: 0.6923
Epoch 70/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3721 - accuracy: 0.8571 - val_loss: 0.5688 - val_accuracy: 0.6923
Epoch 71/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3720 - accuracy: 0.8571 - val_loss: 0.5689 - val_accuracy: 0.6923

Epoch 00071: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 72/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3719 - accuracy: 0.8571 - val_loss: 0.5689 - val_accuracy: 0.6923
Epoch 73/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3718 - accuracy: 0.8571 - val_loss: 0.5689 - val_accuracy: 0.6923
Epoch 74/1000
231/231 [==============================] - 0s 121us/step - loss: 0.3718 - accuracy: 0.8571 - val_loss: 0.5689 - val_accuracy: 0.6923
Epoch 75/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3717 - accuracy: 0.8571 - val_loss: 0.5689 - val_accuracy: 0.6923
Epoch 76/1000
231/231 [==============================] - 0s 95us/step - loss: 0.3716 - accuracy: 0.8571 - val_loss: 0.5689 - val_accuracy: 0.6923
Epoch 77/1000
231/231 [==============================] - 0s 95us/step - loss: 0.3715 - accuracy: 0.8571 - val_loss: 0.5690 - val_accuracy: 0.6923
Epoch 78/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3715 - accuracy: 0.8571 - val_loss: 0.5692 - val_accuracy: 0.6923
Epoch 79/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3714 - accuracy: 0.8571 - val_loss: 0.5692 - val_accuracy: 0.6923
Epoch 80/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3714 - accuracy: 0.8571 - val_loss: 0.5693 - val_accuracy: 0.6923
Epoch 81/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3713 - accuracy: 0.8571 - val_loss: 0.5693 - val_accuracy: 0.6923

Epoch 00081: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05.
Epoch 82/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3712 - accuracy: 0.8571 - val_loss: 0.5693 - val_accuracy: 0.6923
Epoch 83/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3712 - accuracy: 0.8571 - val_loss: 0.5694 - val_accuracy: 0.6923
Epoch 84/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3712 - accuracy: 0.8571 - val_loss: 0.5694 - val_accuracy: 0.6923
Epoch 85/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3711 - accuracy: 0.8571 - val_loss: 0.5694 - val_accuracy: 0.6923
Epoch 86/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3711 - accuracy: 0.8571 - val_loss: 0.5694 - val_accuracy: 0.6923
Epoch 87/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3711 - accuracy: 0.8571 - val_loss: 0.5694 - val_accuracy: 0.6923
Epoch 88/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3711 - accuracy: 0.8571 - val_loss: 0.5694 - val_accuracy: 0.6923
Epoch 89/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3710 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 90/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3710 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 91/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3709 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923

Epoch 00091: ReduceLROnPlateau reducing learning rate to 7.812500371073838e-06.
Epoch 92/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3709 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 93/1000
231/231 [==============================] - 0s 95us/step - loss: 0.3709 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 94/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3709 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 95/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3709 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 96/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3709 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 97/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 98/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 99/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 100/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 101/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923

Epoch 00101: ReduceLROnPlateau reducing learning rate to 3.906250185536919e-06.
Epoch 102/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 103/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 104/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3708 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 105/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 106/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 107/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 108/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 109/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 110/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 111/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923

Epoch 00111: ReduceLROnPlateau reducing learning rate to 1.9531250927684596e-06.
Epoch 112/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 113/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 114/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 115/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 116/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 117/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 118/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 119/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 120/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923
Epoch 121/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5695 - val_accuracy: 0.6923

Epoch 00121: ReduceLROnPlateau reducing learning rate to 9.765625463842298e-07.
Epoch 122/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 123/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 124/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3707 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 125/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 126/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 127/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 128/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 129/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 130/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 131/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00131: ReduceLROnPlateau reducing learning rate to 4.882812731921149e-07.
Epoch 132/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 133/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 134/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 135/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 136/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 137/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 138/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 139/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 140/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 141/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00141: ReduceLROnPlateau reducing learning rate to 2.4414063659605745e-07.
Epoch 142/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 143/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 144/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 145/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 146/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 147/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 148/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 149/1000
231/231 [==============================] - ETA: 0s - loss: 0.3757 - accuracy: 0.87 - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 150/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 151/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00151: ReduceLROnPlateau reducing learning rate to 1.2207031829802872e-07.
Epoch 152/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 153/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 154/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 155/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 156/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 157/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 158/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 159/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 160/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 161/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00161: ReduceLROnPlateau reducing learning rate to 6.103515914901436e-08.
Epoch 162/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 163/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 164/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 165/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 166/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 167/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 168/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 169/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 170/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 171/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00171: ReduceLROnPlateau reducing learning rate to 3.051757957450718e-08.
Epoch 172/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 173/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 174/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 175/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 176/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 177/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 178/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 179/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 180/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 181/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00181: ReduceLROnPlateau reducing learning rate to 1.525878978725359e-08.
Epoch 182/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 183/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 184/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 185/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 186/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 187/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 188/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 189/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 190/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 191/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00191: ReduceLROnPlateau reducing learning rate to 7.629394893626795e-09.
Epoch 192/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 193/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 194/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 195/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 196/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 197/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 198/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 199/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 200/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 201/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00201: ReduceLROnPlateau reducing learning rate to 3.814697446813398e-09.
Epoch 202/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 203/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 204/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 205/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 206/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 207/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 208/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 209/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 210/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 211/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00211: ReduceLROnPlateau reducing learning rate to 1.907348723406699e-09.
Epoch 212/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 213/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 214/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 215/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 216/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 217/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 218/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 219/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 220/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 221/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00221: ReduceLROnPlateau reducing learning rate to 9.536743617033494e-10.
Epoch 222/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 223/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 224/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 225/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 226/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 227/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 228/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 229/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 230/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 231/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00231: ReduceLROnPlateau reducing learning rate to 4.768371808516747e-10.
Epoch 232/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 233/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 234/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 235/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 236/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 237/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 238/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 239/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 240/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 241/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00241: ReduceLROnPlateau reducing learning rate to 2.3841859042583735e-10.
Epoch 242/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 243/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 244/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 245/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 246/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 247/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 248/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 249/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 250/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 251/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00251: ReduceLROnPlateau reducing learning rate to 1.1920929521291868e-10.
Epoch 252/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 253/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 254/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 255/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 256/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 257/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 258/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 259/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 260/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 261/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00261: ReduceLROnPlateau reducing learning rate to 5.960464760645934e-11.
Epoch 262/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 263/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 264/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 265/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 266/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 267/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 268/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 269/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 270/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 271/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00271: ReduceLROnPlateau reducing learning rate to 2.980232380322967e-11.
Epoch 272/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 273/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 274/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 275/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 276/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 277/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 278/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 279/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 280/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 281/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00281: ReduceLROnPlateau reducing learning rate to 1.4901161901614834e-11.
Epoch 282/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 283/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 284/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 285/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 286/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 287/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 288/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 289/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 290/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 291/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00291: ReduceLROnPlateau reducing learning rate to 7.450580950807417e-12.
Epoch 292/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 293/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 294/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 295/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 296/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 297/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 298/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 299/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 300/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 301/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00301: ReduceLROnPlateau reducing learning rate to 3.725290475403709e-12.
Epoch 302/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 303/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 304/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 305/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 306/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 307/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 308/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 309/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 310/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 311/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00311: ReduceLROnPlateau reducing learning rate to 1.8626452377018543e-12.
Epoch 312/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 313/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 314/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 315/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 316/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 317/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 318/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 319/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 320/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 321/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00321: ReduceLROnPlateau reducing learning rate to 9.313226188509272e-13.
Epoch 322/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 323/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 324/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 325/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 326/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 327/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 328/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 329/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 330/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 331/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00331: ReduceLROnPlateau reducing learning rate to 4.656613094254636e-13.
Epoch 332/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 333/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 334/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 335/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 336/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 337/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 338/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 339/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 340/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 341/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00341: ReduceLROnPlateau reducing learning rate to 2.328306547127318e-13.
Epoch 342/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 343/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 344/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 345/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 346/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 347/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 348/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 349/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 350/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 351/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00351: ReduceLROnPlateau reducing learning rate to 1.164153273563659e-13.
Epoch 352/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 353/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 354/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 355/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 356/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 357/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 358/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 359/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 360/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 361/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00361: ReduceLROnPlateau reducing learning rate to 5.820766367818295e-14.
Epoch 362/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 363/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 364/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 365/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 366/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 367/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 368/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 369/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 370/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 371/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00371: ReduceLROnPlateau reducing learning rate to 2.9103831839091474e-14.
Epoch 372/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 373/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 374/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 375/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 376/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 377/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 378/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 379/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 380/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 381/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00381: ReduceLROnPlateau reducing learning rate to 1.4551915919545737e-14.
Epoch 382/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 383/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 384/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 385/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 386/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 387/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 388/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 389/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 390/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 391/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00391: ReduceLROnPlateau reducing learning rate to 7.275957959772868e-15.
Epoch 392/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 393/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 394/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 395/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 396/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 397/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 398/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 399/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 400/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 401/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00401: ReduceLROnPlateau reducing learning rate to 3.637978979886434e-15.
Epoch 402/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 403/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 404/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 405/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 406/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 407/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 408/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 409/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 410/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 411/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00411: ReduceLROnPlateau reducing learning rate to 1.818989489943217e-15.
Epoch 412/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 413/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 414/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 415/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 416/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 417/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 418/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 419/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 420/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 421/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00421: ReduceLROnPlateau reducing learning rate to 9.094947449716085e-16.
Epoch 422/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 423/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 424/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 425/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 426/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 427/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 428/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 429/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 430/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 431/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00431: ReduceLROnPlateau reducing learning rate to 4.547473724858043e-16.
Epoch 432/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 433/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 434/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 435/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 436/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 437/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 438/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 439/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 440/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 441/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00441: ReduceLROnPlateau reducing learning rate to 2.2737368624290214e-16.
Epoch 442/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 443/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 444/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 445/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 446/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 447/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 448/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 449/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 450/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 451/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00451: ReduceLROnPlateau reducing learning rate to 1.1368684312145107e-16.
Epoch 452/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 453/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 454/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 455/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 456/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 457/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 458/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 459/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 460/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 461/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00461: ReduceLROnPlateau reducing learning rate to 5.684342156072553e-17.
Epoch 462/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 463/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 464/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 465/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 466/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 467/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 468/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 469/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 470/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 471/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00471: ReduceLROnPlateau reducing learning rate to 2.842171078036277e-17.
Epoch 472/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 473/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 474/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 475/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 476/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 477/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 478/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 479/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 480/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 481/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00481: ReduceLROnPlateau reducing learning rate to 1.4210855390181384e-17.
Epoch 482/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 483/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 484/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 485/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 486/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 487/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 488/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 489/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 490/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 491/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00491: ReduceLROnPlateau reducing learning rate to 7.105427695090692e-18.
Epoch 492/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 493/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 494/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 495/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 496/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 497/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 498/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 499/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 500/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 501/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00501: ReduceLROnPlateau reducing learning rate to 3.552713847545346e-18.
Epoch 502/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 503/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 504/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 505/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 506/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 507/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 508/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 509/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 510/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 511/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00511: ReduceLROnPlateau reducing learning rate to 1.776356923772673e-18.
Epoch 512/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 513/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 514/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 515/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 516/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 517/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 518/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 519/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 520/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 521/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00521: ReduceLROnPlateau reducing learning rate to 8.881784618863365e-19.
Epoch 522/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 523/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 524/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 525/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 526/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 527/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 528/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 529/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 530/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 531/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00531: ReduceLROnPlateau reducing learning rate to 4.440892309431682e-19.
Epoch 532/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 533/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 534/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 535/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 536/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 537/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 538/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 539/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 540/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 541/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00541: ReduceLROnPlateau reducing learning rate to 2.220446154715841e-19.
Epoch 542/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 543/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 544/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 545/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 546/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 547/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 548/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 549/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 550/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 551/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00551: ReduceLROnPlateau reducing learning rate to 1.1102230773579206e-19.
Epoch 552/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 553/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 554/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 555/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 556/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 557/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 558/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 559/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 560/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 561/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00561: ReduceLROnPlateau reducing learning rate to 5.551115386789603e-20.
Epoch 562/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 563/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 564/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 565/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 566/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 567/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 568/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 569/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 570/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 571/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00571: ReduceLROnPlateau reducing learning rate to 2.7755576933948015e-20.
Epoch 572/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 573/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 574/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 575/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 576/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 577/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 578/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 579/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 580/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 581/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00581: ReduceLROnPlateau reducing learning rate to 1.3877788466974007e-20.
Epoch 582/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 583/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 584/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 585/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 586/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 587/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 588/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 589/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 590/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 591/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00591: ReduceLROnPlateau reducing learning rate to 6.938894233487004e-21.
Epoch 592/1000
231/231 [==============================] - ETA: 0s - loss: 0.3154 - accuracy: 0.93 - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 593/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 594/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 595/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 596/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 597/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 598/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 599/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 600/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 601/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00601: ReduceLROnPlateau reducing learning rate to 3.469447116743502e-21.
Epoch 602/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 603/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 604/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 605/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 606/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 607/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 608/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 609/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 610/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 611/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00611: ReduceLROnPlateau reducing learning rate to 1.734723558371751e-21.
Epoch 612/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 613/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 614/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 615/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 616/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 617/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 618/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 619/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 620/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 621/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00621: ReduceLROnPlateau reducing learning rate to 8.673617791858755e-22.
Epoch 622/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 623/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 624/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 625/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 626/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 627/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 628/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 629/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 630/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 631/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00631: ReduceLROnPlateau reducing learning rate to 4.336808895929377e-22.
Epoch 632/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 633/1000
231/231 [==============================] - 0s 48us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 634/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 635/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 636/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 637/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 638/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 639/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 640/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 641/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00641: ReduceLROnPlateau reducing learning rate to 2.1684044479646887e-22.
Epoch 642/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 643/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 644/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 645/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 646/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 647/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 648/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 649/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 650/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 651/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00651: ReduceLROnPlateau reducing learning rate to 1.0842022239823443e-22.
Epoch 652/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 653/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 654/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 655/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 656/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 657/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 658/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 659/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 660/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 661/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00661: ReduceLROnPlateau reducing learning rate to 5.421011119911722e-23.
Epoch 662/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 663/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 664/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 665/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 666/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 667/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 668/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 669/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 670/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 671/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00671: ReduceLROnPlateau reducing learning rate to 2.710505559955861e-23.
Epoch 672/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 673/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 674/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 675/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 676/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 677/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 678/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 679/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 680/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 681/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00681: ReduceLROnPlateau reducing learning rate to 1.3552527799779304e-23.
Epoch 682/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 683/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 684/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 685/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 686/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 687/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 688/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 689/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 690/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 691/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00691: ReduceLROnPlateau reducing learning rate to 6.776263899889652e-24.
Epoch 692/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 693/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 694/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 695/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 696/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 697/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 698/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 699/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 700/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 701/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00701: ReduceLROnPlateau reducing learning rate to 3.388131949944826e-24.
Epoch 702/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 703/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 704/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 705/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 706/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 707/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 708/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 709/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 710/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 711/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00711: ReduceLROnPlateau reducing learning rate to 1.694065974972413e-24.
Epoch 712/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 713/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 714/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 715/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 716/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 717/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 718/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 719/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 720/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 721/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00721: ReduceLROnPlateau reducing learning rate to 8.470329874862065e-25.
Epoch 722/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 723/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 724/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 725/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 726/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 727/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 728/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 729/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 730/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 731/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00731: ReduceLROnPlateau reducing learning rate to 4.2351649374310325e-25.
Epoch 732/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 733/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 734/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 735/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 736/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 737/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 738/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 739/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 740/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 741/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00741: ReduceLROnPlateau reducing learning rate to 2.1175824687155163e-25.
Epoch 742/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 743/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 744/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 745/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 746/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 747/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 748/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 749/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 750/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 751/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00751: ReduceLROnPlateau reducing learning rate to 1.0587912343577581e-25.
Epoch 752/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 753/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 754/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 755/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 756/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 757/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 758/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 759/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 760/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 761/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00761: ReduceLROnPlateau reducing learning rate to 5.293956171788791e-26.
Epoch 762/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 763/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 764/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 765/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 766/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 767/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 768/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 769/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 770/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 771/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00771: ReduceLROnPlateau reducing learning rate to 2.6469780858943953e-26.
Epoch 772/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 773/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 774/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 775/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 776/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 777/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 778/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 779/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 780/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 781/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00781: ReduceLROnPlateau reducing learning rate to 1.3234890429471977e-26.
Epoch 782/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 783/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 784/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 785/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 786/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 787/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 788/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 789/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 790/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 791/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00791: ReduceLROnPlateau reducing learning rate to 6.617445214735988e-27.
Epoch 792/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 793/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 794/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 795/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 796/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 797/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 798/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 799/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 800/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 801/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00801: ReduceLROnPlateau reducing learning rate to 3.308722607367994e-27.
Epoch 802/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 803/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 804/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 805/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 806/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 807/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 808/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 809/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 810/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 811/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00811: ReduceLROnPlateau reducing learning rate to 1.654361303683997e-27.
Epoch 812/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 813/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 814/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 815/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 816/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 817/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 818/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 819/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 820/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 821/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00821: ReduceLROnPlateau reducing learning rate to 8.271806518419985e-28.
Epoch 822/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 823/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 824/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 825/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 826/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 827/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 828/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 829/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 830/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 831/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00831: ReduceLROnPlateau reducing learning rate to 4.135903259209993e-28.
Epoch 832/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 833/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 834/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 835/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 836/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 837/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 838/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 839/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 840/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 841/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00841: ReduceLROnPlateau reducing learning rate to 2.0679516296049964e-28.
Epoch 842/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 843/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 844/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 845/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 846/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 847/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 848/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 849/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 850/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 851/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00851: ReduceLROnPlateau reducing learning rate to 1.0339758148024982e-28.
Epoch 852/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 853/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 854/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 855/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 856/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 857/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 858/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 859/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 860/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 861/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00861: ReduceLROnPlateau reducing learning rate to 5.169879074012491e-29.
Epoch 862/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 863/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 864/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 865/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 866/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 867/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 868/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 869/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 870/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 871/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00871: ReduceLROnPlateau reducing learning rate to 2.5849395370062454e-29.
Epoch 872/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 873/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 874/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 875/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 876/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 877/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 878/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 879/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 880/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 881/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00881: ReduceLROnPlateau reducing learning rate to 1.2924697685031227e-29.
Epoch 882/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 883/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 884/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 885/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 886/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 887/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 888/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 889/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 890/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 891/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00891: ReduceLROnPlateau reducing learning rate to 6.462348842515614e-30.
Epoch 892/1000
231/231 [==============================] - 0s 48us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 893/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 894/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 895/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 896/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 897/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 898/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 899/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 900/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 901/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00901: ReduceLROnPlateau reducing learning rate to 3.231174421257807e-30.
Epoch 902/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 903/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 904/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 905/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 906/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 907/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 908/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 909/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 910/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 911/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00911: ReduceLROnPlateau reducing learning rate to 1.6155872106289034e-30.
Epoch 912/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 913/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 914/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 915/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 916/1000
231/231 [==============================] - 0s 52us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 917/1000
231/231 [==============================] - 0s 56us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 918/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 919/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 920/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 921/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00921: ReduceLROnPlateau reducing learning rate to 8.077936053144517e-31.
Epoch 922/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 923/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 924/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 925/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 926/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 927/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 928/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 929/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 930/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 931/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00931: ReduceLROnPlateau reducing learning rate to 4.0389680265722585e-31.
Epoch 932/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 933/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 934/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 935/1000
231/231 [==============================] - 0s 74us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 936/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 937/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 938/1000
231/231 [==============================] - 0s 112us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 939/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 940/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 941/1000
231/231 [==============================] - 0s 95us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00941: ReduceLROnPlateau reducing learning rate to 2.0194840132861292e-31.
Epoch 942/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 943/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 944/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 945/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 946/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 947/1000
231/231 [==============================] - 0s 108us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 948/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 949/1000
231/231 [==============================] - 0s 108us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 950/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 951/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00951: ReduceLROnPlateau reducing learning rate to 1.0097420066430646e-31.
Epoch 952/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 953/1000
231/231 [==============================] - 0s 108us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 954/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 955/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 956/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 957/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 958/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 959/1000
231/231 [==============================] - 0s 108us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 960/1000
231/231 [==============================] - 0s 108us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 961/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00961: ReduceLROnPlateau reducing learning rate to 5.048710033215323e-32.
Epoch 962/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 963/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 964/1000
231/231 [==============================] - 0s 125us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 965/1000
231/231 [==============================] - 0s 108us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 966/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 967/1000
231/231 [==============================] - 0s 121us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 968/1000
231/231 [==============================] - 0s 121us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 969/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 970/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 971/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00971: ReduceLROnPlateau reducing learning rate to 2.5243550166076616e-32.
Epoch 972/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 973/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 974/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 975/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 976/1000
231/231 [==============================] - 0s 112us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 977/1000
231/231 [==============================] - 0s 121us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 978/1000
231/231 [==============================] - 0s 112us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 979/1000
231/231 [==============================] - 0s 112us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 980/1000
231/231 [==============================] - 0s 108us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 981/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00981: ReduceLROnPlateau reducing learning rate to 1.2621775083038308e-32.
Epoch 982/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 983/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 984/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 985/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 986/1000
231/231 [==============================] - 0s 82us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 987/1000
231/231 [==============================] - 0s 87us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 988/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 989/1000
231/231 [==============================] - 0s 95us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 990/1000
231/231 [==============================] - 0s 91us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 991/1000
231/231 [==============================] - 0s 121us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923

Epoch 00991: ReduceLROnPlateau reducing learning rate to 6.310887541519154e-33.
Epoch 992/1000
231/231 [==============================] - 0s 104us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 993/1000
231/231 [==============================] - 0s 95us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 994/1000
231/231 [==============================] - 0s 100us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 995/1000
231/231 [==============================] - 0s 78us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 996/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 997/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 998/1000
231/231 [==============================] - 0s 65us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 999/1000
231/231 [==============================] - 0s 69us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
Epoch 1000/1000
231/231 [==============================] - 0s 61us/step - loss: 0.3706 - accuracy: 0.8571 - val_loss: 0.5696 - val_accuracy: 0.6923
In [218]:
# Plot training vs. validation curves from the Keras History object.
# NOTE(review): the log above shows ReduceLROnPlateau shrinking the LR to
# ~1e-33 while loss stays flat from epoch ~791 on — consider EarlyStopping
# so the run stops once the plateau is reached.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x-value per recorded epoch (0-based).
epochs = range(len(acc))

# Accuracy: training as dots, validation as a solid line.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

# Loss curves on a separate figure, same styling convention.
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 1000)
In [219]:
# Score the trained network on the held-out test split.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
78/78 [==============================] - 0s 51us/step
test loss: 0.5695895949999491, test accuracy: 0.692307710647583
In [220]:
# Raw model scores for the test set; ROC-AUC is threshold-free,
# so the un-binarized predictions are passed straight through.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.6775271512113618
In [221]:
# Binarize the scores at 0.5, then compute chance-corrected agreement.
# NOTE(review): this rebinds y_pred in place, so the cell is not idempotent
# with respect to the probability values from the previous cell.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  0.009523809523809601

KMeans clustering on the MFCC features

In [93]:
X
Out[93]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13
0 -0.784459 0.109342 0.681608 1.151904 -0.707724 0.736365 0.241404 -0.461605 1.084621 0.123497 -0.150398 1.784532 0.824544
1 0.213444 0.453851 0.215705 0.987439 -1.851748 -0.001814 -0.218203 0.548263 -0.521851 1.253720 0.882518 -0.892913 0.218184
2 1.478029 0.664210 0.484232 0.450284 -0.427587 -0.448079 -0.195272 -0.610679 -0.675633 0.759098 -0.107303 -0.440790 -1.398093
3 -0.846386 -0.742706 -0.734786 -0.199585 -0.328948 -0.230911 0.620910 0.911236 1.274407 0.150882 -0.603865 -0.919849 0.386069
4 -0.952033 -0.794706 -1.141199 -0.070979 0.454453 0.544600 1.427005 1.918539 1.204102 0.076961 -0.328712 -1.051776 -0.151007
5 -1.244473 2.250723 2.321007 0.549219 1.971653 -1.427849 0.392314 -1.015093 0.157566 0.486970 -0.455070 0.229936 0.185742
6 -0.128652 0.958605 1.191477 0.705974 0.793937 -0.709130 -0.221572 0.922219 0.500475 -0.571099 0.521526 0.567361 -0.486761
7 -1.148662 0.562286 0.636059 0.222732 0.741470 0.009108 -0.949365 -0.486631 -0.519627 -0.739600 -0.415884 -0.026850 0.290685
8 -0.440190 0.461355 -0.016542 -0.158636 0.985626 -0.417310 0.182134 0.290631 0.379949 0.069159 1.080013 0.220566 -0.042505
9 -0.695730 0.398404 1.069978 -0.124019 0.736597 -0.912452 0.673156 0.604840 0.175505 0.496158 0.541788 0.649837 -0.680054
10 -0.006662 0.163832 1.373872 -0.095120 1.621755 1.048509 0.997122 0.721763 0.660834 -1.076324 0.925997 -0.147393 -0.420465
11 -0.771763 -0.484525 -0.874411 0.647747 -1.241650 0.190918 0.457290 0.915208 1.999689 1.879761 0.491598 -0.164372 -0.560754
12 0.140770 1.869847 -1.926303 -2.491201 -2.679759 -1.527330 -0.299345 -0.550878 0.702947 0.143961 0.034796 -0.379551 -0.422354
13 -1.952477 -0.949813 0.063314 1.188657 1.059601 1.221319 0.070346 2.284107 2.889527 2.012105 1.053494 -0.178905 -2.004333
14 -0.895529 0.398850 -0.469782 1.216393 0.657294 -0.550619 -0.854637 -0.815454 1.929689 1.499328 -0.096775 -0.174183 -1.119396
15 -1.161372 1.475106 1.486594 0.127516 0.213940 0.587080 -0.789652 0.130203 1.199389 1.458358 0.404206 0.754289 -0.784214
16 -0.476792 2.179287 0.101035 -1.393755 -0.740834 0.589666 0.873850 0.630539 0.535702 0.387326 -0.979677 0.259755 0.313358
17 -0.089088 -0.841832 0.674093 -0.842623 0.904577 -1.476862 1.853427 -1.108621 0.720923 0.383320 -1.842030 1.712321 -1.612726
18 -1.772732 0.488101 0.057829 0.041074 0.732429 1.052187 0.279830 -0.350521 -0.476338 -0.833438 0.184849 -0.055428 0.627307
19 -0.640351 0.068493 0.619966 -0.599171 0.860806 -0.385120 1.955087 -1.014740 1.224043 1.450896 -2.604448 2.187869 -0.464774
20 0.590240 0.699904 -0.097902 0.127319 -0.882999 0.319144 -0.146142 -0.540616 0.300593 0.688863 0.314647 0.709538 0.572811
21 0.500240 0.875222 -0.833826 0.377484 0.023480 1.321472 1.094037 0.734507 0.141947 0.214524 0.508556 -0.265911 -0.372316
22 -0.076653 0.518030 0.003390 0.452969 -0.218736 0.115409 0.332618 0.611098 0.211893 -0.206368 0.358363 0.614915 0.518172
23 0.010763 -0.352873 -0.460051 0.423968 -0.228393 -0.040296 -0.740869 -0.810034 -1.379366 -0.179024 0.147810 -0.224826 0.615011
24 0.874600 0.173728 -1.041125 0.845285 1.139221 0.264458 -0.378878 0.430226 -0.568469 -1.237333 0.032074 0.812111 0.431460
25 0.200637 0.337376 0.022126 1.189135 -0.210135 -1.195492 0.067874 1.349711 -0.534365 -0.132754 0.055132 0.239009 -0.275633
26 0.362627 0.159292 -1.211688 -0.555502 0.107540 0.797027 -0.246321 -1.113565 -1.373054 -2.369077 -0.539483 1.032005 1.637730
27 -0.504648 -0.561515 -2.173809 -1.525691 -0.810132 -0.617474 0.441103 1.146056 1.464488 -1.111032 -0.742722 0.034623 0.200147
28 -0.339646 -2.140319 -1.409226 -0.207553 -1.216547 -1.135346 -0.831817 1.136334 -0.187159 1.388841 0.282573 -0.807850 -0.371992
29 -1.279089 1.555887 0.890503 2.134195 0.337580 -0.037382 -2.046955 -2.888113 1.329665 1.436687 -1.576201 0.485256 1.429246
... ... ... ... ... ... ... ... ... ... ... ... ... ...
279 -1.132789 -0.931481 -0.350024 -0.228575 -1.201208 -1.044342 0.532403 1.667036 1.383485 -0.967474 -0.286625 -1.920618 -0.797190
280 -0.375948 0.058369 0.489068 0.862825 -1.876102 -0.195043 -1.163295 0.716190 0.384576 -0.168340 1.542126 -0.769460 0.456686
281 0.412883 -1.703432 -0.514845 -1.382818 -0.713972 -0.476089 1.471006 0.826485 0.508608 -1.311788 -2.010635 -1.122699 -0.848851
282 -0.152329 -2.012108 -0.217355 -1.122627 -0.851075 0.634424 1.711007 0.281350 -0.565156 -1.667195 -1.942452 -1.586592 -0.485128
283 0.348443 -2.381428 1.267515 -1.713290 0.161262 -1.589515 1.383857 -0.218429 0.412550 0.382171 -1.073499 -1.745128 -3.227845
284 -0.895866 1.001673 1.059356 0.166883 -0.710729 0.466737 -0.857566 -0.158962 0.004241 0.391823 0.576231 0.329506 -1.331272
285 0.417102 1.957515 2.350604 -1.125042 -2.206390 -0.674814 -1.217854 0.372865 0.840465 -0.472910 0.310419 1.379494 1.128412
286 -0.900897 -0.289100 0.433265 -0.281829 -0.379951 1.272236 0.313949 -0.261980 -0.053111 0.473694 0.493962 -0.263293 -0.657598
287 -0.002448 -0.853612 0.441903 0.406478 -0.823085 0.590185 -0.292046 -0.079952 -0.422138 0.579522 -0.620415 -0.298847 0.620798
288 -0.528092 -1.022206 -0.348679 0.093718 -1.642833 -2.355166 -0.992806 -0.143423 0.270521 0.838321 0.843686 0.469574 -0.325121
289 -0.387248 -1.305014 -0.365540 0.202745 -0.906016 -1.785190 -1.377992 -0.544742 -0.670979 -0.785606 0.505505 0.502505 -0.151297
290 0.425324 -2.583173 -2.181080 -1.262030 -0.179265 0.176164 1.763096 0.436737 -2.048534 -1.014266 1.298221 0.401742 -1.080608
291 -0.572282 -0.375532 -2.067885 -0.361247 -0.315065 -0.671820 -0.183865 -0.517694 -0.802956 -0.951809 0.282442 0.208005 -0.271252
292 -0.084382 -1.508230 -0.105496 -1.930204 -1.529664 -0.795467 1.273717 -1.858542 -0.446361 -0.239346 0.154464 -0.114937 -1.831603
293 -1.172703 0.783209 -1.141589 -0.982768 -0.513216 0.655437 1.962510 0.628858 1.130028 1.104741 1.539591 1.547843 -0.011302
294 -1.293038 0.838303 -1.049071 -0.708031 -0.779995 0.868108 1.621994 0.725495 1.173585 1.424395 1.751950 1.352876 0.339922
295 0.809878 -0.351504 -2.231752 -0.556719 -1.430264 -0.357918 -0.727837 1.110363 1.684188 0.429768 0.560061 0.371789 -1.110030
296 0.305449 -0.148924 -0.727054 -0.126830 0.467272 0.420013 1.212777 0.954055 -0.988419 -0.423614 -0.047239 0.058678 -0.031517
297 1.212224 1.916789 0.287969 -0.073842 0.289112 0.943764 -0.395404 -0.380613 0.262567 0.759137 0.277177 0.493951 1.026995
298 -0.121307 0.217217 0.030920 -0.201270 -0.752001 -0.276070 0.835502 -0.363704 -0.641199 0.283313 0.060013 0.013280 0.477857
299 -0.622824 -0.595352 0.256282 -0.111551 0.023990 1.221659 1.572998 -0.263983 -0.707828 0.707801 0.306249 1.046476 0.214979
300 -0.667480 -0.808638 0.730781 0.054549 0.191421 0.279885 0.088177 0.823617 0.604299 0.640274 -0.360151 1.298688 0.494875
301 0.928382 -2.375767 -0.427528 -0.852350 -1.137004 1.584181 -1.700220 -2.060965 -1.326622 0.451948 0.593212 0.152418 -0.128797
302 -0.483888 0.443846 0.129714 0.199624 -0.106985 0.817702 -0.072817 -1.163918 0.545762 -0.141320 0.041767 -0.402181 0.061897
303 0.715769 0.780533 1.467750 -0.595580 -1.178484 4.014345 -0.112339 -1.611382 -0.295511 0.032462 1.836607 -4.315898 -1.084441
304 0.041466 -0.470275 0.234655 0.109532 -0.518455 -0.977540 -0.613498 -1.108545 0.500653 -0.214143 -0.033265 -0.541673 0.714974
305 0.818747 0.495675 1.005686 0.967334 0.505171 -0.579478 -0.847677 1.574323 1.544556 0.412556 -0.972040 0.290457 0.289042
306 1.062928 -1.149587 1.951840 -0.065775 0.546680 0.994901 -1.817826 2.109742 0.264443 0.505287 -0.757462 0.578677 0.222503
307 -0.701621 -0.049803 -0.719153 -0.048069 1.223251 1.913492 0.887449 0.038186 0.546172 -0.568362 -1.091833 -0.250367 0.831399
308 -0.079821 0.796085 -0.215763 -1.396439 -0.133350 0.582037 2.442796 0.743250 -1.182753 -0.723658 -0.879934 -2.498899 -1.532262

309 rows × 13 columns

In [94]:
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14.
# KMeans.fit returns the fitted estimator, so the chain reads in one pass.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[94]:
[4016.9999999999995,
 3599.8293806720085,
 3349.349727264702,
 3159.6428991584926,
 3002.905895600155,
 2909.3645052598604,
 2804.695882234172,
 2720.275460001156,
 2622.695881163609,
 2543.6022931320426,
 2484.176525692807,
 2436.6681239209124,
 2402.363548718592,
 2338.201438573343]
In [95]:
# Elbow plot: look for the "knee" where adding clusters stops paying off.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow method for KMeans on MFCC features')
plt.xlabel('Number of clusters k')
plt.ylabel('Within-cluster sum of squares (inertia)')
Out[95]:
[<matplotlib.lines.Line2D at 0x1e82b4d8dd8>]

K = 2 (chosen from the elbow plot above)

In [96]:
# Final clustering on the MFCC features with K=2 (chosen from the elbow plot).
kmeans_mfcc = KMeans(n_clusters=2, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
Out[96]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [97]:
kmeans_mfcc.labels_
Out[97]:
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,
       0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
       0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1,
       1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1,
       0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0,
       0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1,
       1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
       1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1,
       1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1,
       0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
       1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
       0])
In [98]:
# predict() on the same data the model was fitted on returns the same
# assignments as kmeans_mfcc.labels_ (compare the two outputs above/below).
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
Out[98]:
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,
       0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
       0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1,
       1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1,
       0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0,
       0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1,
       1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
       1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1,
       1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1,
       0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
       1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
       0])
In [99]:
# NOTE(review): this mutates X in place (adds 2 columns), so the cell is not
# idempotent and earlier displays of X become stale after a re-run.
X.loc[:,'Cluster'] = clusters_mfcc
# list(y) discards y's index so the values are assigned positionally.
X.loc[:,'chosen'] = list(y)
In [100]:
X
Out[100]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13 Cluster chosen
0 -0.784459 0.109342 0.681608 1.151904 -0.707724 0.736365 0.241404 -0.461605 1.084621 0.123497 -0.150398 1.784532 0.824544 1 0
1 0.213444 0.453851 0.215705 0.987439 -1.851748 -0.001814 -0.218203 0.548263 -0.521851 1.253720 0.882518 -0.892913 0.218184 1 0
2 1.478029 0.664210 0.484232 0.450284 -0.427587 -0.448079 -0.195272 -0.610679 -0.675633 0.759098 -0.107303 -0.440790 -1.398093 0 0
3 -0.846386 -0.742706 -0.734786 -0.199585 -0.328948 -0.230911 0.620910 0.911236 1.274407 0.150882 -0.603865 -0.919849 0.386069 0 0
4 -0.952033 -0.794706 -1.141199 -0.070979 0.454453 0.544600 1.427005 1.918539 1.204102 0.076961 -0.328712 -1.051776 -0.151007 0 0
5 -1.244473 2.250723 2.321007 0.549219 1.971653 -1.427849 0.392314 -1.015093 0.157566 0.486970 -0.455070 0.229936 0.185742 1 0
6 -0.128652 0.958605 1.191477 0.705974 0.793937 -0.709130 -0.221572 0.922219 0.500475 -0.571099 0.521526 0.567361 -0.486761 1 0
7 -1.148662 0.562286 0.636059 0.222732 0.741470 0.009108 -0.949365 -0.486631 -0.519627 -0.739600 -0.415884 -0.026850 0.290685 1 0
8 -0.440190 0.461355 -0.016542 -0.158636 0.985626 -0.417310 0.182134 0.290631 0.379949 0.069159 1.080013 0.220566 -0.042505 1 0
9 -0.695730 0.398404 1.069978 -0.124019 0.736597 -0.912452 0.673156 0.604840 0.175505 0.496158 0.541788 0.649837 -0.680054 1 0
10 -0.006662 0.163832 1.373872 -0.095120 1.621755 1.048509 0.997122 0.721763 0.660834 -1.076324 0.925997 -0.147393 -0.420465 1 0
11 -0.771763 -0.484525 -0.874411 0.647747 -1.241650 0.190918 0.457290 0.915208 1.999689 1.879761 0.491598 -0.164372 -0.560754 1 0
12 0.140770 1.869847 -1.926303 -2.491201 -2.679759 -1.527330 -0.299345 -0.550878 0.702947 0.143961 0.034796 -0.379551 -0.422354 0 0
13 -1.952477 -0.949813 0.063314 1.188657 1.059601 1.221319 0.070346 2.284107 2.889527 2.012105 1.053494 -0.178905 -2.004333 1 0
14 -0.895529 0.398850 -0.469782 1.216393 0.657294 -0.550619 -0.854637 -0.815454 1.929689 1.499328 -0.096775 -0.174183 -1.119396 1 0
15 -1.161372 1.475106 1.486594 0.127516 0.213940 0.587080 -0.789652 0.130203 1.199389 1.458358 0.404206 0.754289 -0.784214 1 0
16 -0.476792 2.179287 0.101035 -1.393755 -0.740834 0.589666 0.873850 0.630539 0.535702 0.387326 -0.979677 0.259755 0.313358 1 0
17 -0.089088 -0.841832 0.674093 -0.842623 0.904577 -1.476862 1.853427 -1.108621 0.720923 0.383320 -1.842030 1.712321 -1.612726 0 0
18 -1.772732 0.488101 0.057829 0.041074 0.732429 1.052187 0.279830 -0.350521 -0.476338 -0.833438 0.184849 -0.055428 0.627307 1 0
19 -0.640351 0.068493 0.619966 -0.599171 0.860806 -0.385120 1.955087 -1.014740 1.224043 1.450896 -2.604448 2.187869 -0.464774 1 0
20 0.590240 0.699904 -0.097902 0.127319 -0.882999 0.319144 -0.146142 -0.540616 0.300593 0.688863 0.314647 0.709538 0.572811 1 0
21 0.500240 0.875222 -0.833826 0.377484 0.023480 1.321472 1.094037 0.734507 0.141947 0.214524 0.508556 -0.265911 -0.372316 1 0
22 -0.076653 0.518030 0.003390 0.452969 -0.218736 0.115409 0.332618 0.611098 0.211893 -0.206368 0.358363 0.614915 0.518172 1 0
23 0.010763 -0.352873 -0.460051 0.423968 -0.228393 -0.040296 -0.740869 -0.810034 -1.379366 -0.179024 0.147810 -0.224826 0.615011 1 0
24 0.874600 0.173728 -1.041125 0.845285 1.139221 0.264458 -0.378878 0.430226 -0.568469 -1.237333 0.032074 0.812111 0.431460 1 0
25 0.200637 0.337376 0.022126 1.189135 -0.210135 -1.195492 0.067874 1.349711 -0.534365 -0.132754 0.055132 0.239009 -0.275633 1 0
26 0.362627 0.159292 -1.211688 -0.555502 0.107540 0.797027 -0.246321 -1.113565 -1.373054 -2.369077 -0.539483 1.032005 1.637730 1 0
27 -0.504648 -0.561515 -2.173809 -1.525691 -0.810132 -0.617474 0.441103 1.146056 1.464488 -1.111032 -0.742722 0.034623 0.200147 0 0
28 -0.339646 -2.140319 -1.409226 -0.207553 -1.216547 -1.135346 -0.831817 1.136334 -0.187159 1.388841 0.282573 -0.807850 -0.371992 0 0
29 -1.279089 1.555887 0.890503 2.134195 0.337580 -0.037382 -2.046955 -2.888113 1.329665 1.436687 -1.576201 0.485256 1.429246 1 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
279 -1.132789 -0.931481 -0.350024 -0.228575 -1.201208 -1.044342 0.532403 1.667036 1.383485 -0.967474 -0.286625 -1.920618 -0.797190 0 1
280 -0.375948 0.058369 0.489068 0.862825 -1.876102 -0.195043 -1.163295 0.716190 0.384576 -0.168340 1.542126 -0.769460 0.456686 1 1
281 0.412883 -1.703432 -0.514845 -1.382818 -0.713972 -0.476089 1.471006 0.826485 0.508608 -1.311788 -2.010635 -1.122699 -0.848851 0 1
282 -0.152329 -2.012108 -0.217355 -1.122627 -0.851075 0.634424 1.711007 0.281350 -0.565156 -1.667195 -1.942452 -1.586592 -0.485128 0 1
283 0.348443 -2.381428 1.267515 -1.713290 0.161262 -1.589515 1.383857 -0.218429 0.412550 0.382171 -1.073499 -1.745128 -3.227845 0 1
284 -0.895866 1.001673 1.059356 0.166883 -0.710729 0.466737 -0.857566 -0.158962 0.004241 0.391823 0.576231 0.329506 -1.331272 1 1
285 0.417102 1.957515 2.350604 -1.125042 -2.206390 -0.674814 -1.217854 0.372865 0.840465 -0.472910 0.310419 1.379494 1.128412 1 1
286 -0.900897 -0.289100 0.433265 -0.281829 -0.379951 1.272236 0.313949 -0.261980 -0.053111 0.473694 0.493962 -0.263293 -0.657598 1 1
287 -0.002448 -0.853612 0.441903 0.406478 -0.823085 0.590185 -0.292046 -0.079952 -0.422138 0.579522 -0.620415 -0.298847 0.620798 1 1
288 -0.528092 -1.022206 -0.348679 0.093718 -1.642833 -2.355166 -0.992806 -0.143423 0.270521 0.838321 0.843686 0.469574 -0.325121 0 1
289 -0.387248 -1.305014 -0.365540 0.202745 -0.906016 -1.785190 -1.377992 -0.544742 -0.670979 -0.785606 0.505505 0.502505 -0.151297 0 1
290 0.425324 -2.583173 -2.181080 -1.262030 -0.179265 0.176164 1.763096 0.436737 -2.048534 -1.014266 1.298221 0.401742 -1.080608 0 1
291 -0.572282 -0.375532 -2.067885 -0.361247 -0.315065 -0.671820 -0.183865 -0.517694 -0.802956 -0.951809 0.282442 0.208005 -0.271252 0 1
292 -0.084382 -1.508230 -0.105496 -1.930204 -1.529664 -0.795467 1.273717 -1.858542 -0.446361 -0.239346 0.154464 -0.114937 -1.831603 0 1
293 -1.172703 0.783209 -1.141589 -0.982768 -0.513216 0.655437 1.962510 0.628858 1.130028 1.104741 1.539591 1.547843 -0.011302 1 1
294 -1.293038 0.838303 -1.049071 -0.708031 -0.779995 0.868108 1.621994 0.725495 1.173585 1.424395 1.751950 1.352876 0.339922 1 1
295 0.809878 -0.351504 -2.231752 -0.556719 -1.430264 -0.357918 -0.727837 1.110363 1.684188 0.429768 0.560061 0.371789 -1.110030 0 1
296 0.305449 -0.148924 -0.727054 -0.126830 0.467272 0.420013 1.212777 0.954055 -0.988419 -0.423614 -0.047239 0.058678 -0.031517 0 1
297 1.212224 1.916789 0.287969 -0.073842 0.289112 0.943764 -0.395404 -0.380613 0.262567 0.759137 0.277177 0.493951 1.026995 1 1
298 -0.121307 0.217217 0.030920 -0.201270 -0.752001 -0.276070 0.835502 -0.363704 -0.641199 0.283313 0.060013 0.013280 0.477857 1 1
299 -0.622824 -0.595352 0.256282 -0.111551 0.023990 1.221659 1.572998 -0.263983 -0.707828 0.707801 0.306249 1.046476 0.214979 1 1
300 -0.667480 -0.808638 0.730781 0.054549 0.191421 0.279885 0.088177 0.823617 0.604299 0.640274 -0.360151 1.298688 0.494875 1 1
301 0.928382 -2.375767 -0.427528 -0.852350 -1.137004 1.584181 -1.700220 -2.060965 -1.326622 0.451948 0.593212 0.152418 -0.128797 0 1
302 -0.483888 0.443846 0.129714 0.199624 -0.106985 0.817702 -0.072817 -1.163918 0.545762 -0.141320 0.041767 -0.402181 0.061897 1 1
303 0.715769 0.780533 1.467750 -0.595580 -1.178484 4.014345 -0.112339 -1.611382 -0.295511 0.032462 1.836607 -4.315898 -1.084441 1 1
304 0.041466 -0.470275 0.234655 0.109532 -0.518455 -0.977540 -0.613498 -1.108545 0.500653 -0.214143 -0.033265 -0.541673 0.714974 1 1
305 0.818747 0.495675 1.005686 0.967334 0.505171 -0.579478 -0.847677 1.574323 1.544556 0.412556 -0.972040 0.290457 0.289042 1 1
306 1.062928 -1.149587 1.951840 -0.065775 0.546680 0.994901 -1.817826 2.109742 0.264443 0.505287 -0.757462 0.578677 0.222503 1 1
307 -0.701621 -0.049803 -0.719153 -0.048069 1.223251 1.913492 0.887449 0.038186 0.546172 -0.568362 -1.091833 -0.250367 0.831399 1 1
308 -0.079821 0.796085 -0.215763 -1.396439 -0.133350 0.582037 2.442796 0.743250 -1.182753 -0.723658 -0.879934 -2.498899 -1.532262 0 1

309 rows × 15 columns

In [101]:
# Count tracks per (chosen, cluster) pair, then pivot to a Cluster x chosen table.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
# Stacked bars: how chosen (1) vs. not-chosen (0) tracks distribute over the clusters.
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[101]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e82b505400>
In [222]:
# Render the current company name as a level-2 markdown heading.
# (Latex is imported here but not used in this cell.)
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[3]))

Hotel Marrakech

ANN

In [268]:
X = df_n_ps_std_mfcc[3]
In [269]:
y = df_n_ps[3]['chosen']
In [270]:
X_train, X_test, y_train, y_test = train_test_split(X, y)
In [271]:
X_train.shape
Out[271]:
(139, 13)
In [107]:
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [108]:
# Candidate hyperparameter values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but excluded from the search (commented out in the param dict below).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [109]:
import time
start = time.time() # Current time in seconds since Jan 1, 1970 (the epoch); used to time the search

np.random.seed(1234)
# Parameter grid for GridSearchCV (batch_size deliberately left out).
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Refit on accuracy, but also record Cohen's kappa for every candidate.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` argument was removed in scikit-learn 0.24; this call
# breaks on newer versions — drop it when upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [110]:
# Run the exhaustive search (slow: ~21 minutes per the output below), then
# report the best parameters with their CV accuracy and kappa.
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time after model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30, 20, 10), 'learning_rate_init': 0.003, 'max_iter': 75}, que permiten obtener un Accuracy de 86.33% y un Kappa del 66.54
Tiempo total: 21.38 minutos
In [272]:
# Input dimension: number of MFCC features.
n0=X_train.shape[1]
# Hard-code the architecture found by the earlier grid search (30, 20, 10).
grid.best_params_['hidden_layer_sizes'] = [30,20,10]
### hidden_layer_sizes
# Layer widths for the Keras model: the hidden sizes plus one output unit.
# (list() copies the values — equivalent to the original index-append loop.)
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
# Learning rate and epoch budget taken from the grid-search winner.
lr = 0.003
epochs = 75
In [273]:
input_tensor = Input(shape = (n0,))
In [274]:
# Chain the hidden Dense layers; hidden_outputs[i] feeds layer i.
# NOTE(review): activation is 'tanh' although the grid search above selected
# 'relu' — confirm whether this deviation is intentional.
hidden_outputs = [input_tensor]
for i in range (len(ns)-1):
    hidden_outputs.append(Dense(ns[i], activation = 'tanh')(hidden_outputs[i]))
    
# Single sigmoid unit for binary (chosen / not chosen) classification.
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [275]:
model = Model([input_tensor], [classification_output])
# Snapshot the freshly initialized weights so training can later restart
# from the same initialization (restored via set_weights below).
weights = model.get_weights()
In [276]:
model.summary()
Model: "model_12"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_12 (InputLayer)        (None, 13)                0         
_________________________________________________________________
dense_37 (Dense)             (None, 30)                420       
_________________________________________________________________
dense_38 (Dense)             (None, 20)                620       
_________________________________________________________________
dense_39 (Dense)             (None, 10)                210       
_________________________________________________________________
dense_40 (Dense)             (None, 1)                 11        
=================================================================
Total params: 1,261
Trainable params: 1,261
Non-trainable params: 0
_________________________________________________________________
In [277]:
# Restore the initial weights so training starts from the same initialization.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy argument name; newer Keras versions use
# `learning_rate` — confirm before upgrading.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy stalls for 10 epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test),
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 139 samples, validate on 47 samples
Epoch 1/75
139/139 [==============================] - 0s 2ms/step - loss: 0.7119 - accuracy: 0.4748 - val_loss: 0.6764 - val_accuracy: 0.5957
Epoch 2/75
139/139 [==============================] - 0s 79us/step - loss: 0.6629 - accuracy: 0.6619 - val_loss: 0.6286 - val_accuracy: 0.7021
Epoch 3/75
139/139 [==============================] - 0s 72us/step - loss: 0.6227 - accuracy: 0.7914 - val_loss: 0.5921 - val_accuracy: 0.7447
Epoch 4/75
139/139 [==============================] - 0s 79us/step - loss: 0.5865 - accuracy: 0.8058 - val_loss: 0.5597 - val_accuracy: 0.7660
Epoch 5/75
139/139 [==============================] - 0s 72us/step - loss: 0.5507 - accuracy: 0.8201 - val_loss: 0.5260 - val_accuracy: 0.7660
Epoch 6/75
139/139 [==============================] - 0s 79us/step - loss: 0.5136 - accuracy: 0.8489 - val_loss: 0.4960 - val_accuracy: 0.7872
Epoch 7/75
139/139 [==============================] - 0s 86us/step - loss: 0.4773 - accuracy: 0.8417 - val_loss: 0.4729 - val_accuracy: 0.7447
Epoch 8/75
139/139 [==============================] - 0s 72us/step - loss: 0.4456 - accuracy: 0.8417 - val_loss: 0.4574 - val_accuracy: 0.7872
Epoch 9/75
139/139 [==============================] - 0s 79us/step - loss: 0.4162 - accuracy: 0.8417 - val_loss: 0.4409 - val_accuracy: 0.8085
Epoch 10/75
139/139 [==============================] - 0s 93us/step - loss: 0.3904 - accuracy: 0.8561 - val_loss: 0.4262 - val_accuracy: 0.8085
Epoch 11/75
139/139 [==============================] - 0s 79us/step - loss: 0.3699 - accuracy: 0.8561 - val_loss: 0.4161 - val_accuracy: 0.8298
Epoch 12/75
139/139 [==============================] - 0s 79us/step - loss: 0.3578 - accuracy: 0.8633 - val_loss: 0.4064 - val_accuracy: 0.8298
Epoch 13/75
139/139 [==============================] - 0s 79us/step - loss: 0.3470 - accuracy: 0.8633 - val_loss: 0.4064 - val_accuracy: 0.8511
Epoch 14/75
139/139 [==============================] - 0s 79us/step - loss: 0.3383 - accuracy: 0.8633 - val_loss: 0.4040 - val_accuracy: 0.8298
Epoch 15/75
139/139 [==============================] - 0s 79us/step - loss: 0.3269 - accuracy: 0.8633 - val_loss: 0.4150 - val_accuracy: 0.8085
Epoch 16/75
139/139 [==============================] - 0s 72us/step - loss: 0.3161 - accuracy: 0.8777 - val_loss: 0.4421 - val_accuracy: 0.7660
Epoch 17/75
139/139 [==============================] - 0s 86us/step - loss: 0.3101 - accuracy: 0.8777 - val_loss: 0.4574 - val_accuracy: 0.7447
Epoch 18/75
139/139 [==============================] - 0s 79us/step - loss: 0.3062 - accuracy: 0.8993 - val_loss: 0.4546 - val_accuracy: 0.7872
Epoch 19/75
139/139 [==============================] - 0s 86us/step - loss: 0.2971 - accuracy: 0.8993 - val_loss: 0.4487 - val_accuracy: 0.7872
Epoch 20/75
139/139 [==============================] - 0s 79us/step - loss: 0.2904 - accuracy: 0.9065 - val_loss: 0.4424 - val_accuracy: 0.7872
Epoch 21/75
139/139 [==============================] - 0s 79us/step - loss: 0.2835 - accuracy: 0.9065 - val_loss: 0.4318 - val_accuracy: 0.7872
Epoch 22/75
139/139 [==============================] - 0s 115us/step - loss: 0.2776 - accuracy: 0.8921 - val_loss: 0.4251 - val_accuracy: 0.8085
Epoch 23/75
139/139 [==============================] - 0s 108us/step - loss: 0.2706 - accuracy: 0.9065 - val_loss: 0.4328 - val_accuracy: 0.8085

Epoch 00023: ReduceLROnPlateau reducing learning rate to 0.001500000013038516.
Epoch 24/75
139/139 [==============================] - 0s 144us/step - loss: 0.2633 - accuracy: 0.9137 - val_loss: 0.4410 - val_accuracy: 0.7872
Epoch 25/75
139/139 [==============================] - 0s 108us/step - loss: 0.2592 - accuracy: 0.9209 - val_loss: 0.4482 - val_accuracy: 0.7872
Epoch 26/75
139/139 [==============================] - 0s 86us/step - loss: 0.2557 - accuracy: 0.9281 - val_loss: 0.4575 - val_accuracy: 0.7872
Epoch 27/75
139/139 [==============================] - 0s 86us/step - loss: 0.2516 - accuracy: 0.9209 - val_loss: 0.4665 - val_accuracy: 0.7872
Epoch 28/75
139/139 [==============================] - 0s 115us/step - loss: 0.2478 - accuracy: 0.9137 - val_loss: 0.4634 - val_accuracy: 0.8085
Epoch 29/75
139/139 [==============================] - 0s 86us/step - loss: 0.2436 - accuracy: 0.9137 - val_loss: 0.4585 - val_accuracy: 0.8085
Epoch 30/75
139/139 [==============================] - 0s 93us/step - loss: 0.2404 - accuracy: 0.9137 - val_loss: 0.4543 - val_accuracy: 0.8298
Epoch 31/75
139/139 [==============================] - 0s 79us/step - loss: 0.2362 - accuracy: 0.9209 - val_loss: 0.4553 - val_accuracy: 0.8298
Epoch 32/75
139/139 [==============================] - 0s 86us/step - loss: 0.2339 - accuracy: 0.9137 - val_loss: 0.4600 - val_accuracy: 0.8298
Epoch 33/75
139/139 [==============================] - 0s 86us/step - loss: 0.2286 - accuracy: 0.9209 - val_loss: 0.4598 - val_accuracy: 0.8298

Epoch 00033: ReduceLROnPlateau reducing learning rate to 0.000750000006519258.
Epoch 34/75
139/139 [==============================] - 0s 86us/step - loss: 0.2252 - accuracy: 0.9209 - val_loss: 0.4618 - val_accuracy: 0.8298
Epoch 35/75
139/139 [==============================] - 0s 122us/step - loss: 0.2235 - accuracy: 0.9209 - val_loss: 0.4629 - val_accuracy: 0.8298
Epoch 36/75
139/139 [==============================] - 0s 101us/step - loss: 0.2216 - accuracy: 0.9209 - val_loss: 0.4644 - val_accuracy: 0.8298
Epoch 37/75
139/139 [==============================] - 0s 93us/step - loss: 0.2197 - accuracy: 0.9209 - val_loss: 0.4666 - val_accuracy: 0.8298
Epoch 38/75
139/139 [==============================] - 0s 93us/step - loss: 0.2176 - accuracy: 0.9209 - val_loss: 0.4677 - val_accuracy: 0.8298
Epoch 39/75
139/139 [==============================] - 0s 93us/step - loss: 0.2156 - accuracy: 0.9281 - val_loss: 0.4700 - val_accuracy: 0.8298
Epoch 40/75
139/139 [==============================] - 0s 86us/step - loss: 0.2140 - accuracy: 0.9281 - val_loss: 0.4730 - val_accuracy: 0.8298
Epoch 41/75
139/139 [==============================] - 0s 94us/step - loss: 0.2118 - accuracy: 0.9281 - val_loss: 0.4786 - val_accuracy: 0.8085
Epoch 42/75
139/139 [==============================] - 0s 101us/step - loss: 0.2098 - accuracy: 0.9281 - val_loss: 0.4820 - val_accuracy: 0.8085
Epoch 43/75
139/139 [==============================] - 0s 101us/step - loss: 0.2080 - accuracy: 0.9281 - val_loss: 0.4826 - val_accuracy: 0.8085

Epoch 00043: ReduceLROnPlateau reducing learning rate to 0.000375000003259629.
Epoch 44/75
139/139 [==============================] - 0s 93us/step - loss: 0.2061 - accuracy: 0.9281 - val_loss: 0.4818 - val_accuracy: 0.8085
Epoch 45/75
139/139 [==============================] - 0s 101us/step - loss: 0.2052 - accuracy: 0.9353 - val_loss: 0.4817 - val_accuracy: 0.8085
Epoch 46/75
139/139 [==============================] - 0s 101us/step - loss: 0.2044 - accuracy: 0.9353 - val_loss: 0.4812 - val_accuracy: 0.8085
Epoch 47/75
139/139 [==============================] - 0s 86us/step - loss: 0.2033 - accuracy: 0.9353 - val_loss: 0.4803 - val_accuracy: 0.8085
Epoch 48/75
139/139 [==============================] - 0s 86us/step - loss: 0.2023 - accuracy: 0.9353 - val_loss: 0.4808 - val_accuracy: 0.8085
Epoch 49/75
139/139 [==============================] - 0s 93us/step - loss: 0.2014 - accuracy: 0.9281 - val_loss: 0.4804 - val_accuracy: 0.8298
Epoch 50/75
139/139 [==============================] - 0s 79us/step - loss: 0.2006 - accuracy: 0.9281 - val_loss: 0.4809 - val_accuracy: 0.8298
Epoch 51/75
139/139 [==============================] - 0s 86us/step - loss: 0.1999 - accuracy: 0.9281 - val_loss: 0.4809 - val_accuracy: 0.8298
Epoch 52/75
139/139 [==============================] - 0s 86us/step - loss: 0.1990 - accuracy: 0.9281 - val_loss: 0.4836 - val_accuracy: 0.8085
Epoch 53/75
139/139 [==============================] - 0s 101us/step - loss: 0.1977 - accuracy: 0.9281 - val_loss: 0.4847 - val_accuracy: 0.8085

Epoch 00053: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145.
Epoch 54/75
139/139 [==============================] - 0s 101us/step - loss: 0.1969 - accuracy: 0.9281 - val_loss: 0.4852 - val_accuracy: 0.8085
Epoch 55/75
139/139 [==============================] - 0s 86us/step - loss: 0.1964 - accuracy: 0.9281 - val_loss: 0.4860 - val_accuracy: 0.8085
Epoch 56/75
139/139 [==============================] - 0s 93us/step - loss: 0.1960 - accuracy: 0.9281 - val_loss: 0.4854 - val_accuracy: 0.8085
Epoch 57/75
139/139 [==============================] - 0s 93us/step - loss: 0.1955 - accuracy: 0.9281 - val_loss: 0.4850 - val_accuracy: 0.8085
Epoch 58/75
139/139 [==============================] - 0s 101us/step - loss: 0.1950 - accuracy: 0.9281 - val_loss: 0.4849 - val_accuracy: 0.8298
Epoch 59/75
139/139 [==============================] - 0s 79us/step - loss: 0.1946 - accuracy: 0.9281 - val_loss: 0.4848 - val_accuracy: 0.8298
Epoch 60/75
139/139 [==============================] - 0s 93us/step - loss: 0.1940 - accuracy: 0.9281 - val_loss: 0.4851 - val_accuracy: 0.8298
Epoch 61/75
139/139 [==============================] - 0s 86us/step - loss: 0.1936 - accuracy: 0.9281 - val_loss: 0.4849 - val_accuracy: 0.8298
Epoch 62/75
139/139 [==============================] - 0s 86us/step - loss: 0.1932 - accuracy: 0.9353 - val_loss: 0.4862 - val_accuracy: 0.8085
Epoch 63/75
139/139 [==============================] - 0s 86us/step - loss: 0.1927 - accuracy: 0.9353 - val_loss: 0.4871 - val_accuracy: 0.8085

Epoch 00063: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05.
Epoch 64/75
139/139 [==============================] - 0s 137us/step - loss: 0.1922 - accuracy: 0.9353 - val_loss: 0.4875 - val_accuracy: 0.8085
Epoch 65/75
139/139 [==============================] - 0s 93us/step - loss: 0.1919 - accuracy: 0.9353 - val_loss: 0.4880 - val_accuracy: 0.8085
Epoch 66/75
139/139 [==============================] - 0s 93us/step - loss: 0.1917 - accuracy: 0.9353 - val_loss: 0.4891 - val_accuracy: 0.8085
Epoch 67/75
139/139 [==============================] - 0s 93us/step - loss: 0.1914 - accuracy: 0.9424 - val_loss: 0.4895 - val_accuracy: 0.8085
Epoch 68/75
139/139 [==============================] - 0s 129us/step - loss: 0.1912 - accuracy: 0.9424 - val_loss: 0.4900 - val_accuracy: 0.8085
Epoch 69/75
139/139 [==============================] - 0s 122us/step - loss: 0.1909 - accuracy: 0.9424 - val_loss: 0.4904 - val_accuracy: 0.8085
Epoch 70/75
139/139 [==============================] - 0s 129us/step - loss: 0.1908 - accuracy: 0.9424 - val_loss: 0.4912 - val_accuracy: 0.8085
Epoch 71/75
139/139 [==============================] - 0s 101us/step - loss: 0.1905 - accuracy: 0.9424 - val_loss: 0.4911 - val_accuracy: 0.8085
Epoch 72/75
139/139 [==============================] - 0s 86us/step - loss: 0.1902 - accuracy: 0.9424 - val_loss: 0.4912 - val_accuracy: 0.8085
Epoch 73/75
139/139 [==============================] - 0s 79us/step - loss: 0.1900 - accuracy: 0.9424 - val_loss: 0.4910 - val_accuracy: 0.8085

Epoch 00073: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05.
Epoch 74/75
139/139 [==============================] - 0s 86us/step - loss: 0.1898 - accuracy: 0.9424 - val_loss: 0.4910 - val_accuracy: 0.8085
Epoch 75/75
139/139 [==============================] - 0s 72us/step - loss: 0.1896 - accuracy: 0.9424 - val_loss: 0.4910 - val_accuracy: 0.8085
In [278]:
# Plot the training history: accuracy and loss curves, train vs. validation.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x-value per recorded epoch.
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
range(0, 75)
In [279]:
# Evaluate the trained network on the held-out test split.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
47/47 [==============================] - 0s 170us/step
test loss: 0.49097854279457254, test accuracy: 0.8085106611251831
In [280]:
# Score the predicted positive-class probabilities with ROC AUC.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.8571428571428572
In [281]:
# Binarize the probabilities at the 0.5 threshold, then compute Cohen's kappa
# against the true labels.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.5346534653465347

KMeans

In [121]:
X
Out[121]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13
0 0.311006 1.696486 0.912001 -0.211934 -0.513557 1.357699 0.777385 0.508585 -2.290902 -2.422249 -0.738438 -2.221933 -1.191363
1 0.947147 -0.576741 -1.258913 -0.786859 0.887961 -1.895175 -0.310427 -0.374360 1.478062 0.972075 -1.105984 0.612318 -1.486887
2 -0.115048 1.257050 0.043002 -2.677464 3.902183 -1.091787 0.505797 2.341684 -2.345224 -1.678088 -2.608854 -2.617777 -2.135652
3 0.621032 1.417449 1.399722 -0.625673 1.012110 0.230671 -0.287988 1.012771 -2.250326 -0.340971 -0.353905 -0.717440 -0.390485
4 0.340978 1.662814 -1.775422 0.156552 1.678811 0.301711 2.038462 1.511985 1.508787 -2.046602 0.741073 -0.282747 -0.779814
5 0.426765 -1.056701 -1.244088 -0.696846 -0.372415 -0.847420 -0.209607 1.462924 -0.541420 0.000628 -1.135148 1.608546 1.709532
6 0.210857 -1.779497 -2.206121 -0.832640 0.636169 -1.979858 -0.510102 1.437770 0.128209 0.025521 0.184211 2.300204 0.912793
7 -0.821293 -0.049796 0.237440 0.379918 0.714133 0.670070 0.122605 -0.069298 -0.126759 -0.199559 0.547891 -0.099623 -0.024895
8 0.420103 -0.662020 -0.550543 -0.566406 -0.923203 -0.295152 -0.533234 0.927026 0.119135 0.218761 -0.245778 0.627242 1.313952
9 -1.436247 0.435343 2.482690 1.099668 -0.392845 0.565039 0.569531 -0.088218 -0.131137 -0.699769 -0.538549 -0.329443 0.942919
10 -1.574051 -1.334372 -1.636184 1.768991 -0.369456 -0.008046 -1.402331 0.012625 1.135935 1.623145 -0.653935 0.182348 1.052310
11 -1.798986 -1.632467 -1.314854 2.656006 -0.096678 -0.174852 -1.748372 0.185804 0.930317 0.365776 -0.676448 0.358271 1.523770
12 -0.708207 0.931180 0.258840 -0.189291 -0.204832 -0.103872 0.221697 -0.231695 -0.003439 0.423528 1.259835 0.119625 -0.192417
13 -2.007033 -0.288096 0.099713 0.390909 1.333138 -0.069950 0.643074 0.172080 -0.109666 0.304475 -1.157528 -1.708326 -1.420079
14 -0.497985 0.020592 -0.123619 0.165046 -0.765078 -0.465219 0.172533 0.722853 0.284863 -0.035284 0.024769 -0.065990 -0.992437
15 1.200625 0.984580 -0.234312 0.348855 0.175663 0.309396 0.390611 -0.745912 -0.667554 -0.052439 0.119610 -0.862930 0.945979
16 0.435253 3.280178 0.407736 1.143148 2.291571 0.546530 0.170667 0.427708 -0.063936 -0.532360 0.404150 0.415849 0.869331
17 -0.398944 0.035026 -1.634042 -1.354378 0.854385 1.406182 -0.773335 0.663902 0.928496 1.278830 0.464511 0.235475 -0.040374
18 -0.454008 -0.234096 -0.930672 -0.507506 0.545773 0.437756 1.026910 0.013959 -0.620099 -0.593763 1.073690 0.594340 0.987056
19 0.149846 0.062252 -0.002122 0.786346 0.810930 0.304880 -0.882886 -0.043156 2.503584 0.894947 0.394981 0.761651 0.402963
20 -0.314274 0.446482 0.889744 0.891114 1.249237 0.718469 0.296834 -0.831548 -0.393364 -0.103574 0.295790 0.092061 0.424633
21 0.659365 1.053258 -0.877939 -0.295954 -1.122110 -0.035202 1.512616 0.031457 -0.700740 -1.687204 -1.136215 -1.545451 -0.082548
22 0.568507 -0.357318 -1.183577 -0.069205 0.462644 -0.956011 0.501504 0.240708 -0.025482 0.416003 0.237690 -0.566935 -0.846151
23 0.696474 0.477607 -1.637469 -1.158983 -2.224208 -1.861929 -0.176558 0.694585 0.426826 -0.088376 -0.335290 1.125320 0.705700
24 -0.221795 -0.513464 -0.506448 0.594506 0.033232 -1.141879 -1.582503 -0.081204 -0.001962 -0.704687 -0.473528 0.580117 1.533686
25 0.036099 -0.007586 0.116729 0.438081 -1.526141 -1.994283 -1.014100 0.028630 -0.553238 -0.540795 0.467730 0.943285 0.498193
26 -0.291576 -0.372192 -1.176599 0.078535 0.516288 -1.851892 -2.218803 0.335200 0.323222 0.006649 0.017717 0.133172 1.208725
27 0.953536 0.427304 -0.554063 0.425439 1.368674 0.362392 0.477030 -0.976616 -0.382390 0.310619 -0.903078 -0.943886 -0.047616
28 -1.172014 1.307258 -1.059323 -0.655908 1.591107 0.483432 0.474862 0.348014 -0.527448 0.798802 -0.075253 1.943808 0.108268
29 -0.954427 0.000731 -0.367958 0.281024 0.303337 0.744504 1.271647 0.298340 -0.057042 -0.297712 -0.053703 -0.045043 -0.561554
... ... ... ... ... ... ... ... ... ... ... ... ... ...
156 0.232363 -1.167339 -0.114632 1.240724 -0.209611 0.597503 -0.105216 -0.393018 -0.168804 -0.038601 0.602075 -0.482733 0.192333
157 -1.686193 -0.806140 -0.531342 -0.411912 0.312945 0.751058 0.624837 -0.394463 0.549120 -1.174079 -1.374572 -1.950144 -0.652535
158 0.487798 1.116042 -0.308817 0.175231 -0.191701 -0.682970 0.502123 0.749073 1.365476 0.198244 1.283992 0.132188 0.482532
159 1.049575 0.742765 0.000505 0.670386 0.235663 -0.297404 0.891743 0.047729 0.086633 0.873400 0.552393 0.496793 0.659122
160 0.285967 0.602916 -0.009050 0.802464 0.333031 -1.182611 0.473870 0.896236 0.890391 0.208214 0.786475 0.044481 -0.114927
161 2.568510 -0.180837 0.794882 1.410838 0.898076 0.468184 0.963255 0.338074 2.081580 2.353196 0.146660 -0.295606 -0.020484
162 0.821849 0.906757 0.282262 0.304716 -0.691824 0.772704 2.543328 -0.404440 1.861464 1.635426 0.204673 0.084333 0.469447
163 2.581037 0.239015 1.212048 0.498566 0.095720 0.062469 3.463238 0.374969 -0.054235 -0.365031 -0.169020 1.160964 0.666076
164 -0.247271 -0.874145 -0.840584 0.233138 0.034101 0.259892 0.144353 -0.570094 1.244117 0.282845 0.127444 -0.721587 -1.450860
165 0.188979 -0.519200 0.108496 -0.513645 -0.637646 0.812515 0.626360 -0.156977 -0.092241 -0.517923 0.026563 -0.597616 -0.101096
166 -0.015438 -0.656621 -0.739614 0.302131 0.583862 0.465267 0.342075 -0.318902 0.221544 0.654368 0.777463 -0.462212 -0.867288
167 -1.567081 -1.052883 -0.417918 0.636963 -0.531279 0.787238 -1.913461 -0.020653 -0.111129 0.112259 -0.380422 0.497894 0.709826
168 -1.883530 -0.172892 -0.340073 -0.255266 -0.480237 -0.061425 -0.158589 -0.308725 -0.034923 0.150845 0.696367 0.704196 0.473391
169 -1.577057 -0.602693 0.448785 1.073850 -0.714538 1.427240 -1.645225 0.812069 -0.019466 -0.719024 -0.991241 0.521497 0.461555
170 0.667824 -0.298287 -0.412356 -1.154598 0.171532 -0.341146 -0.411827 -1.296671 0.428160 -0.233124 0.035111 -0.934501 0.123701
171 0.626343 -0.046406 -0.168999 -1.278941 0.502027 -0.060296 0.062571 -1.284727 0.028380 -0.488598 -0.255008 -1.199407 0.088115
172 -0.441233 -0.987406 0.015664 -2.982951 -0.379800 1.485748 -2.174788 -3.521120 -0.191502 2.486906 2.771782 0.789523 0.553119
173 0.331514 0.334707 -0.187508 0.489055 1.133140 1.016598 -0.572753 -0.634721 -0.567790 -0.492410 0.634219 -0.122575 -0.205540
174 0.480131 0.345645 0.202709 -0.423456 1.184414 2.116965 -0.463050 0.212550 1.973473 -0.996794 0.924229 -0.170049 -0.068464
175 1.039410 -0.773764 0.113739 -0.796036 -1.053802 -1.238009 0.153897 0.497600 1.347261 0.972165 0.993095 0.066125 -0.069772
176 0.194148 -0.229033 -0.571129 -0.704359 -0.204400 -0.273049 1.105329 0.052851 -0.360196 0.099095 1.063628 -0.266594 0.041526
177 -0.169188 0.025195 -0.189648 0.376353 0.802036 -1.185140 0.488985 0.244963 1.305631 0.241661 0.402415 -0.494815 0.252206
178 1.433007 0.217051 -0.388425 -1.158798 2.068592 1.278810 -1.193547 -0.909321 -0.207122 2.062093 1.374797 0.383804 1.569650
179 1.371536 0.624596 -0.082552 0.444824 1.959112 -0.736647 -1.448177 0.624897 1.304939 -0.025270 -0.519401 0.592135 1.312240
180 0.815061 -1.210119 0.844643 -1.152602 -0.216878 -1.573232 -0.065062 2.136014 -0.285964 1.827988 -0.982121 1.139199 0.936226
181 0.814962 -1.028970 -1.340094 -1.579784 0.774822 -0.351654 -2.148181 2.772395 1.638263 -0.394371 1.796246 1.182459 0.824064
182 1.615277 0.706391 -0.611277 0.513438 0.987249 1.226124 0.240966 0.485917 1.355615 -0.480955 -0.255325 -0.370864 0.107591
183 0.290224 0.578762 0.024629 0.119894 0.626180 1.025427 0.180541 -0.504388 -1.085411 -1.413825 0.811722 0.640653 0.433677
184 0.086408 -1.394139 -0.501233 1.251905 -0.481983 0.026482 -1.317983 -0.580623 -0.160381 -0.718194 0.110108 -0.183905 0.074891
185 0.024909 -0.713904 -1.235134 -0.194562 0.155358 -0.586587 -0.455970 0.577457 1.172268 0.468799 0.500130 1.133624 0.192845

186 rows × 13 columns

In [122]:
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[122]:
[2418.0,
 2172.1027618675616,
 1998.025251732407,
 1881.5714074571665,
 1790.4988909951144,
 1710.5509624175916,
 1638.1160961205187,
 1594.2979605381602,
 1541.5219098889218,
 1492.348355544395,
 1435.6626781919845,
 1397.526425851007,
 1339.3046499586067,
 1333.0833102955885]
In [123]:
# Elbow plot: look for the "knee" where adding clusters stops paying off.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow method')
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.show()
Out[123]:
[<matplotlib.lines.Line2D at 0x1e82cc61860>]

K=3

In [124]:
# Fit the final 3-cluster model (k chosen from the elbow plot above).
kmeans_mfcc = KMeans(n_init=10, n_clusters=3, random_state=0)
kmeans_mfcc.fit(X)
Out[124]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [125]:
# Cluster assignment (0/1/2) for each row of X from the fitted model.
kmeans_mfcc.labels_
Out[125]:
array([1, 2, 1, 1, 0, 2, 2, 0, 2, 0, 2, 2, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1,
       1, 2, 2, 2, 2, 0, 0, 1, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 2, 1, 1, 1,
       1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 2, 1, 2,
       2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2,
       0, 1, 1, 0, 1, 0, 2, 2, 2, 1, 2, 2, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1,
       2, 2, 2, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 1, 0, 2, 1, 1, 0,
       0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 0,
       1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 2,
       1, 0, 0, 0, 2, 2, 0, 0, 2, 2])
In [126]:
# Cluster assignments for X. X is the same data the model was fit on, so
# predict(X) would just recompute kmeans_mfcc.labels_ — reuse them directly.
clusters_mfcc = kmeans_mfcc.labels_
clusters_mfcc
Out[126]:
array([1, 2, 1, 1, 0, 2, 2, 0, 2, 0, 2, 2, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1,
       1, 2, 2, 2, 2, 0, 0, 1, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 2, 1, 1, 1,
       1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 2, 1, 2,
       2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2,
       0, 1, 1, 0, 1, 0, 2, 2, 2, 1, 2, 2, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1,
       2, 2, 2, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 1, 0, 2, 1, 1, 0,
       0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 0,
       1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 2,
       1, 0, 0, 0, 2, 2, 0, 0, 2, 2])
In [127]:
# Attach the cluster label and the target to the feature frame for the
# stacked-bar comparison below.
# NOTE(review): this mutates X in place — from here on X is no longer a pure
# feature matrix, so re-running earlier modelling cells against X would
# silently include the 'Cluster' and 'chosen' columns.
X.loc[:,'Cluster'] = clusters_mfcc
X.loc[:,'chosen'] = list(y)
In [128]:
X
Out[128]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13 Cluster chosen
0 0.311006 1.696486 0.912001 -0.211934 -0.513557 1.357699 0.777385 0.508585 -2.290902 -2.422249 -0.738438 -2.221933 -1.191363 1 0
1 0.947147 -0.576741 -1.258913 -0.786859 0.887961 -1.895175 -0.310427 -0.374360 1.478062 0.972075 -1.105984 0.612318 -1.486887 2 0
2 -0.115048 1.257050 0.043002 -2.677464 3.902183 -1.091787 0.505797 2.341684 -2.345224 -1.678088 -2.608854 -2.617777 -2.135652 1 0
3 0.621032 1.417449 1.399722 -0.625673 1.012110 0.230671 -0.287988 1.012771 -2.250326 -0.340971 -0.353905 -0.717440 -0.390485 1 0
4 0.340978 1.662814 -1.775422 0.156552 1.678811 0.301711 2.038462 1.511985 1.508787 -2.046602 0.741073 -0.282747 -0.779814 0 0
5 0.426765 -1.056701 -1.244088 -0.696846 -0.372415 -0.847420 -0.209607 1.462924 -0.541420 0.000628 -1.135148 1.608546 1.709532 2 0
6 0.210857 -1.779497 -2.206121 -0.832640 0.636169 -1.979858 -0.510102 1.437770 0.128209 0.025521 0.184211 2.300204 0.912793 2 0
7 -0.821293 -0.049796 0.237440 0.379918 0.714133 0.670070 0.122605 -0.069298 -0.126759 -0.199559 0.547891 -0.099623 -0.024895 0 0
8 0.420103 -0.662020 -0.550543 -0.566406 -0.923203 -0.295152 -0.533234 0.927026 0.119135 0.218761 -0.245778 0.627242 1.313952 2 0
9 -1.436247 0.435343 2.482690 1.099668 -0.392845 0.565039 0.569531 -0.088218 -0.131137 -0.699769 -0.538549 -0.329443 0.942919 0 0
10 -1.574051 -1.334372 -1.636184 1.768991 -0.369456 -0.008046 -1.402331 0.012625 1.135935 1.623145 -0.653935 0.182348 1.052310 2 0
11 -1.798986 -1.632467 -1.314854 2.656006 -0.096678 -0.174852 -1.748372 0.185804 0.930317 0.365776 -0.676448 0.358271 1.523770 2 0
12 -0.708207 0.931180 0.258840 -0.189291 -0.204832 -0.103872 0.221697 -0.231695 -0.003439 0.423528 1.259835 0.119625 -0.192417 0 0
13 -2.007033 -0.288096 0.099713 0.390909 1.333138 -0.069950 0.643074 0.172080 -0.109666 0.304475 -1.157528 -1.708326 -1.420079 1 0
14 -0.497985 0.020592 -0.123619 0.165046 -0.765078 -0.465219 0.172533 0.722853 0.284863 -0.035284 0.024769 -0.065990 -0.992437 1 0
15 1.200625 0.984580 -0.234312 0.348855 0.175663 0.309396 0.390611 -0.745912 -0.667554 -0.052439 0.119610 -0.862930 0.945979 0 0
16 0.435253 3.280178 0.407736 1.143148 2.291571 0.546530 0.170667 0.427708 -0.063936 -0.532360 0.404150 0.415849 0.869331 0 0
17 -0.398944 0.035026 -1.634042 -1.354378 0.854385 1.406182 -0.773335 0.663902 0.928496 1.278830 0.464511 0.235475 -0.040374 2 0
18 -0.454008 -0.234096 -0.930672 -0.507506 0.545773 0.437756 1.026910 0.013959 -0.620099 -0.593763 1.073690 0.594340 0.987056 0 0
19 0.149846 0.062252 -0.002122 0.786346 0.810930 0.304880 -0.882886 -0.043156 2.503584 0.894947 0.394981 0.761651 0.402963 0 0
20 -0.314274 0.446482 0.889744 0.891114 1.249237 0.718469 0.296834 -0.831548 -0.393364 -0.103574 0.295790 0.092061 0.424633 0 0
21 0.659365 1.053258 -0.877939 -0.295954 -1.122110 -0.035202 1.512616 0.031457 -0.700740 -1.687204 -1.136215 -1.545451 -0.082548 1 0
22 0.568507 -0.357318 -1.183577 -0.069205 0.462644 -0.956011 0.501504 0.240708 -0.025482 0.416003 0.237690 -0.566935 -0.846151 1 0
23 0.696474 0.477607 -1.637469 -1.158983 -2.224208 -1.861929 -0.176558 0.694585 0.426826 -0.088376 -0.335290 1.125320 0.705700 2 0
24 -0.221795 -0.513464 -0.506448 0.594506 0.033232 -1.141879 -1.582503 -0.081204 -0.001962 -0.704687 -0.473528 0.580117 1.533686 2 0
25 0.036099 -0.007586 0.116729 0.438081 -1.526141 -1.994283 -1.014100 0.028630 -0.553238 -0.540795 0.467730 0.943285 0.498193 2 0
26 -0.291576 -0.372192 -1.176599 0.078535 0.516288 -1.851892 -2.218803 0.335200 0.323222 0.006649 0.017717 0.133172 1.208725 2 0
27 0.953536 0.427304 -0.554063 0.425439 1.368674 0.362392 0.477030 -0.976616 -0.382390 0.310619 -0.903078 -0.943886 -0.047616 0 0
28 -1.172014 1.307258 -1.059323 -0.655908 1.591107 0.483432 0.474862 0.348014 -0.527448 0.798802 -0.075253 1.943808 0.108268 0 0
29 -0.954427 0.000731 -0.367958 0.281024 0.303337 0.744504 1.271647 0.298340 -0.057042 -0.297712 -0.053703 -0.045043 -0.561554 1 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
156 0.232363 -1.167339 -0.114632 1.240724 -0.209611 0.597503 -0.105216 -0.393018 -0.168804 -0.038601 0.602075 -0.482733 0.192333 0 1
157 -1.686193 -0.806140 -0.531342 -0.411912 0.312945 0.751058 0.624837 -0.394463 0.549120 -1.174079 -1.374572 -1.950144 -0.652535 1 1
158 0.487798 1.116042 -0.308817 0.175231 -0.191701 -0.682970 0.502123 0.749073 1.365476 0.198244 1.283992 0.132188 0.482532 0 1
159 1.049575 0.742765 0.000505 0.670386 0.235663 -0.297404 0.891743 0.047729 0.086633 0.873400 0.552393 0.496793 0.659122 0 1
160 0.285967 0.602916 -0.009050 0.802464 0.333031 -1.182611 0.473870 0.896236 0.890391 0.208214 0.786475 0.044481 -0.114927 0 1
161 2.568510 -0.180837 0.794882 1.410838 0.898076 0.468184 0.963255 0.338074 2.081580 2.353196 0.146660 -0.295606 -0.020484 0 1
162 0.821849 0.906757 0.282262 0.304716 -0.691824 0.772704 2.543328 -0.404440 1.861464 1.635426 0.204673 0.084333 0.469447 0 1
163 2.581037 0.239015 1.212048 0.498566 0.095720 0.062469 3.463238 0.374969 -0.054235 -0.365031 -0.169020 1.160964 0.666076 0 1
164 -0.247271 -0.874145 -0.840584 0.233138 0.034101 0.259892 0.144353 -0.570094 1.244117 0.282845 0.127444 -0.721587 -1.450860 1 1
165 0.188979 -0.519200 0.108496 -0.513645 -0.637646 0.812515 0.626360 -0.156977 -0.092241 -0.517923 0.026563 -0.597616 -0.101096 1 1
166 -0.015438 -0.656621 -0.739614 0.302131 0.583862 0.465267 0.342075 -0.318902 0.221544 0.654368 0.777463 -0.462212 -0.867288 1 1
167 -1.567081 -1.052883 -0.417918 0.636963 -0.531279 0.787238 -1.913461 -0.020653 -0.111129 0.112259 -0.380422 0.497894 0.709826 2 1
168 -1.883530 -0.172892 -0.340073 -0.255266 -0.480237 -0.061425 -0.158589 -0.308725 -0.034923 0.150845 0.696367 0.704196 0.473391 2 1
169 -1.577057 -0.602693 0.448785 1.073850 -0.714538 1.427240 -1.645225 0.812069 -0.019466 -0.719024 -0.991241 0.521497 0.461555 2 1
170 0.667824 -0.298287 -0.412356 -1.154598 0.171532 -0.341146 -0.411827 -1.296671 0.428160 -0.233124 0.035111 -0.934501 0.123701 1 1
171 0.626343 -0.046406 -0.168999 -1.278941 0.502027 -0.060296 0.062571 -1.284727 0.028380 -0.488598 -0.255008 -1.199407 0.088115 1 1
172 -0.441233 -0.987406 0.015664 -2.982951 -0.379800 1.485748 -2.174788 -3.521120 -0.191502 2.486906 2.771782 0.789523 0.553119 2 1
173 0.331514 0.334707 -0.187508 0.489055 1.133140 1.016598 -0.572753 -0.634721 -0.567790 -0.492410 0.634219 -0.122575 -0.205540 0 1
174 0.480131 0.345645 0.202709 -0.423456 1.184414 2.116965 -0.463050 0.212550 1.973473 -0.996794 0.924229 -0.170049 -0.068464 0 1
175 1.039410 -0.773764 0.113739 -0.796036 -1.053802 -1.238009 0.153897 0.497600 1.347261 0.972165 0.993095 0.066125 -0.069772 2 1
176 0.194148 -0.229033 -0.571129 -0.704359 -0.204400 -0.273049 1.105329 0.052851 -0.360196 0.099095 1.063628 -0.266594 0.041526 1 1
177 -0.169188 0.025195 -0.189648 0.376353 0.802036 -1.185140 0.488985 0.244963 1.305631 0.241661 0.402415 -0.494815 0.252206 0 1
178 1.433007 0.217051 -0.388425 -1.158798 2.068592 1.278810 -1.193547 -0.909321 -0.207122 2.062093 1.374797 0.383804 1.569650 0 1
179 1.371536 0.624596 -0.082552 0.444824 1.959112 -0.736647 -1.448177 0.624897 1.304939 -0.025270 -0.519401 0.592135 1.312240 0 1
180 0.815061 -1.210119 0.844643 -1.152602 -0.216878 -1.573232 -0.065062 2.136014 -0.285964 1.827988 -0.982121 1.139199 0.936226 2 1
181 0.814962 -1.028970 -1.340094 -1.579784 0.774822 -0.351654 -2.148181 2.772395 1.638263 -0.394371 1.796246 1.182459 0.824064 2 1
182 1.615277 0.706391 -0.611277 0.513438 0.987249 1.226124 0.240966 0.485917 1.355615 -0.480955 -0.255325 -0.370864 0.107591 0 1
183 0.290224 0.578762 0.024629 0.119894 0.626180 1.025427 0.180541 -0.504388 -1.085411 -1.413825 0.811722 0.640653 0.433677 0 1
184 0.086408 -1.394139 -0.501233 1.251905 -0.481983 0.026482 -1.317983 -0.580623 -0.160381 -0.718194 0.110108 -0.183905 0.074891 2 1
185 0.024909 -0.713904 -1.235134 -0.194562 0.155358 -0.586587 -0.455970 0.577457 1.172268 0.468799 0.500130 1.133624 0.192845 2 1

186 rows × 15 columns

In [129]:
# Count rows per (chosen, Cluster) pair, reshape to one column per 'chosen'
# value, and draw a stacked bar chart: one bar per cluster, split by whether
# the song was chosen (0/1).
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[129]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e82cc917b8>
In [237]:
# Render the current company name as a level-2 markdown heading.
from IPython.display import display, Markdown, Latex
heading = Markdown('## ' + companies[4])
display(heading)

Specialized

ANN

In [282]:
# Feature matrix: standardized MFCC features for company index 4.
X = df_n_ps_std_mfcc[4]
In [283]:
# Binary target: whether the song was chosen for the playlist.
y = df_n_ps[4]['chosen']
In [284]:
# Hold out a test split. random_state pins the split so the reported metrics
# are reproducible across kernel restarts (previously unseeded, so every
# re-run trained/evaluated on a different split).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
In [285]:
# Sanity check: (rows, features) of the training split.
X_train.shape
Out[285]:
(164, 13)
In [135]:
# Base estimator; its hyper-parameters are tuned by the grid search below,
# so this initial architecture is just a placeholder.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [136]:
# Candidate values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005,
                          0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [137]:
import time
start = time.time()  # wall-clock start, used below to report total tuning time

np.random.seed(1234)  # make the stochastic MLP training repeatable

# Search space for the grid (batch_size left commented out to keep it tractable).
parametros = {
    'activation': activation_vec,
    'max_iter': max_iter_vec,
    'hidden_layer_sizes': hidden_layer_sizes_vec,
    'learning_rate_init': learning_rate_init_vec#,
    #'batch_size': batch_size_vec
}
# Track both kappa and accuracy; refit the best model on accuracy.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring,
                    refit='accuracy', n_jobs=-1, iid=True)
In [138]:
# Run the grid search, then report the winning configuration, its CV accuracy
# and kappa, and the elapsed wall-clock time.
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # wall-clock time after the model finished training
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20, 20, 20), 'learning_rate_init': 0.003, 'max_iter': 400}, que permiten obtener un Accuracy de 72.56% y un Kappa del 45.12
Tiempo total: 20.25 minutos
In [286]:
# Network hyper-parameters, taken from the grid-search winner above.
n0 = X_train.shape[1]  # input dimension (13 MFCC features)
# Pin the architecture explicitly (the grid search selected (20, 20, 20)).
grid.best_params_['hidden_layer_sizes'] = [20, 20, 20]
# Layer sizes: the hidden layers followed by a single output unit.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = 0.003      # learning rate selected by the grid search
epochs = 400    # max_iter selected by the grid search
In [287]:
# Keras functional-API input layer; n0 is the feature count (13, per X_train.shape).
input_tensor = Input(shape = (n0,))
In [288]:
# Stack the hidden Dense layers sized by ns; the last entry of ns (the output
# size, 1) is handled separately below.
# NOTE(review): the grid search above selected activation='relu', but these
# hidden layers use 'tanh' — confirm the mismatch is intentional.
hidden_outputs = [input_tensor]
for i in range (len(ns)-1):
    hidden_outputs.append(Dense(ns[i], activation = 'tanh')(hidden_outputs[i]))
    
# Single sigmoid unit for binary classification (chosen / not chosen).
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [289]:
# Wire the graph into a trainable Model and snapshot its freshly
# initialized weights so training can be restarted from the same state.
model = Model(inputs=[input_tensor], outputs=[classification_output])
weights = model.get_weights()
In [290]:
model.summary()
Model: "model_13"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_13 (InputLayer)        (None, 13)                0         
_________________________________________________________________
dense_41 (Dense)             (None, 20)                280       
_________________________________________________________________
dense_42 (Dense)             (None, 20)                420       
_________________________________________________________________
dense_43 (Dense)             (None, 20)                420       
_________________________________________________________________
dense_44 (Dense)             (None, 1)                 21        
=================================================================
Total params: 1,141
Trainable params: 1,141
Non-trainable params: 0
_________________________________________________________________
In [291]:
# Restore the saved initial weights so training starts from the same state,
# then compile with Adam at the grid-selected learning rate and train.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy fails to improve by
# at least 0.01 for 10 consecutive epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test),
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 164 samples, validate on 55 samples
Epoch 1/400
164/164 [==============================] - 0s 1ms/step - loss: 0.7168 - accuracy: 0.4817 - val_loss: 0.6811 - val_accuracy: 0.5636
Epoch 2/400
164/164 [==============================] - 0s 85us/step - loss: 0.6669 - accuracy: 0.5854 - val_loss: 0.6870 - val_accuracy: 0.6000
Epoch 3/400
164/164 [==============================] - 0s 98us/step - loss: 0.6599 - accuracy: 0.6220 - val_loss: 0.7104 - val_accuracy: 0.5818
Epoch 4/400
164/164 [==============================] - 0s 98us/step - loss: 0.6420 - accuracy: 0.6341 - val_loss: 0.7219 - val_accuracy: 0.5636
Epoch 5/400
164/164 [==============================] - 0s 85us/step - loss: 0.6310 - accuracy: 0.6829 - val_loss: 0.7212 - val_accuracy: 0.5636
Epoch 6/400
164/164 [==============================] - 0s 91us/step - loss: 0.6193 - accuracy: 0.6951 - val_loss: 0.7314 - val_accuracy: 0.4727
Epoch 7/400
164/164 [==============================] - 0s 85us/step - loss: 0.6119 - accuracy: 0.7134 - val_loss: 0.7587 - val_accuracy: 0.4182
Epoch 8/400
164/164 [==============================] - 0s 85us/step - loss: 0.6074 - accuracy: 0.6829 - val_loss: 0.8125 - val_accuracy: 0.4182
Epoch 9/400
164/164 [==============================] - 0s 85us/step - loss: 0.5974 - accuracy: 0.6768 - val_loss: 0.8539 - val_accuracy: 0.3455
Epoch 10/400
164/164 [==============================] - 0s 85us/step - loss: 0.5908 - accuracy: 0.6707 - val_loss: 0.8662 - val_accuracy: 0.3273
Epoch 11/400
164/164 [==============================] - 0s 85us/step - loss: 0.5776 - accuracy: 0.6951 - val_loss: 0.8581 - val_accuracy: 0.3455
Epoch 12/400
164/164 [==============================] - 0s 79us/step - loss: 0.5681 - accuracy: 0.7256 - val_loss: 0.8630 - val_accuracy: 0.4000

Epoch 00012: ReduceLROnPlateau reducing learning rate to 0.001500000013038516.
Epoch 13/400
164/164 [==============================] - 0s 85us/step - loss: 0.5613 - accuracy: 0.7195 - val_loss: 0.8672 - val_accuracy: 0.4182
Epoch 14/400
164/164 [==============================] - 0s 91us/step - loss: 0.5581 - accuracy: 0.7195 - val_loss: 0.8675 - val_accuracy: 0.4000
Epoch 15/400
164/164 [==============================] - 0s 98us/step - loss: 0.5507 - accuracy: 0.7195 - val_loss: 0.8727 - val_accuracy: 0.3818
Epoch 16/400
164/164 [==============================] - 0s 91us/step - loss: 0.5440 - accuracy: 0.7317 - val_loss: 0.8778 - val_accuracy: 0.3818
Epoch 17/400
164/164 [==============================] - 0s 91us/step - loss: 0.5369 - accuracy: 0.7317 - val_loss: 0.8860 - val_accuracy: 0.3818
Epoch 18/400
164/164 [==============================] - 0s 91us/step - loss: 0.5301 - accuracy: 0.7378 - val_loss: 0.8864 - val_accuracy: 0.3455
Epoch 19/400
164/164 [==============================] - 0s 91us/step - loss: 0.5248 - accuracy: 0.7317 - val_loss: 0.8862 - val_accuracy: 0.3455
Epoch 20/400
164/164 [==============================] - 0s 98us/step - loss: 0.5199 - accuracy: 0.7439 - val_loss: 0.8968 - val_accuracy: 0.3636
Epoch 21/400
164/164 [==============================] - 0s 85us/step - loss: 0.5140 - accuracy: 0.7439 - val_loss: 0.9013 - val_accuracy: 0.3455
Epoch 22/400
164/164 [==============================] - 0s 91us/step - loss: 0.5075 - accuracy: 0.7439 - val_loss: 0.9059 - val_accuracy: 0.4182

Epoch 00022: ReduceLROnPlateau reducing learning rate to 0.000750000006519258.
Epoch 23/400
164/164 [==============================] - 0s 98us/step - loss: 0.5032 - accuracy: 0.7439 - val_loss: 0.9097 - val_accuracy: 0.4182
Epoch 24/400
164/164 [==============================] - 0s 91us/step - loss: 0.4991 - accuracy: 0.7500 - val_loss: 0.9164 - val_accuracy: 0.4000
Epoch 25/400
164/164 [==============================] - 0s 85us/step - loss: 0.4963 - accuracy: 0.7500 - val_loss: 0.9207 - val_accuracy: 0.4000
Epoch 26/400
164/164 [==============================] - 0s 110us/step - loss: 0.4933 - accuracy: 0.7622 - val_loss: 0.9254 - val_accuracy: 0.4182
Epoch 27/400
164/164 [==============================] - 0s 98us/step - loss: 0.4904 - accuracy: 0.7805 - val_loss: 0.9302 - val_accuracy: 0.4182
Epoch 28/400
164/164 [==============================] - 0s 98us/step - loss: 0.4879 - accuracy: 0.7744 - val_loss: 0.9323 - val_accuracy: 0.4000
Epoch 29/400
164/164 [==============================] - 0s 91us/step - loss: 0.4850 - accuracy: 0.7805 - val_loss: 0.9389 - val_accuracy: 0.3818
Epoch 30/400
164/164 [==============================] - 0s 91us/step - loss: 0.4822 - accuracy: 0.7744 - val_loss: 0.9342 - val_accuracy: 0.3636
Epoch 31/400
164/164 [==============================] - 0s 146us/step - loss: 0.4789 - accuracy: 0.7744 - val_loss: 0.9348 - val_accuracy: 0.3636
Epoch 32/400
164/164 [==============================] - 0s 116us/step - loss: 0.4755 - accuracy: 0.7805 - val_loss: 0.9346 - val_accuracy: 0.3636

Epoch 00032: ReduceLROnPlateau reducing learning rate to 0.000375000003259629.
Epoch 33/400
164/164 [==============================] - 0s 104us/step - loss: 0.4732 - accuracy: 0.7805 - val_loss: 0.9364 - val_accuracy: 0.3636
Epoch 34/400
164/164 [==============================] - 0s 85us/step - loss: 0.4716 - accuracy: 0.7805 - val_loss: 0.9369 - val_accuracy: 0.3636
Epoch 35/400
164/164 [==============================] - 0s 85us/step - loss: 0.4704 - accuracy: 0.7805 - val_loss: 0.9399 - val_accuracy: 0.3636
Epoch 36/400
164/164 [==============================] - 0s 98us/step - loss: 0.4688 - accuracy: 0.7805 - val_loss: 0.9395 - val_accuracy: 0.3636
Epoch 37/400
164/164 [==============================] - 0s 91us/step - loss: 0.4676 - accuracy: 0.7805 - val_loss: 0.9403 - val_accuracy: 0.3636
Epoch 38/400
164/164 [==============================] - 0s 98us/step - loss: 0.4658 - accuracy: 0.7805 - val_loss: 0.9409 - val_accuracy: 0.3636
Epoch 39/400
164/164 [==============================] - 0s 98us/step - loss: 0.4648 - accuracy: 0.7805 - val_loss: 0.9430 - val_accuracy: 0.3636
Epoch 40/400
164/164 [==============================] - 0s 85us/step - loss: 0.4633 - accuracy: 0.7805 - val_loss: 0.9435 - val_accuracy: 0.3636
Epoch 41/400
164/164 [==============================] - 0s 98us/step - loss: 0.4616 - accuracy: 0.7805 - val_loss: 0.9435 - val_accuracy: 0.3636
Epoch 42/400
164/164 [==============================] - 0s 91us/step - loss: 0.4599 - accuracy: 0.7805 - val_loss: 0.9446 - val_accuracy: 0.3818

Epoch 00042: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145.
Epoch 43/400
164/164 [==============================] - 0s 128us/step - loss: 0.4583 - accuracy: 0.7866 - val_loss: 0.9457 - val_accuracy: 0.3818
Epoch 44/400
164/164 [==============================] - 0s 91us/step - loss: 0.4573 - accuracy: 0.7927 - val_loss: 0.9477 - val_accuracy: 0.3818
Epoch 45/400
164/164 [==============================] - 0s 91us/step - loss: 0.4568 - accuracy: 0.7866 - val_loss: 0.9489 - val_accuracy: 0.3818
Epoch 46/400
164/164 [==============================] - 0s 91us/step - loss: 0.4562 - accuracy: 0.7866 - val_loss: 0.9483 - val_accuracy: 0.3818
Epoch 47/400
164/164 [==============================] - 0s 79us/step - loss: 0.4553 - accuracy: 0.7866 - val_loss: 0.9480 - val_accuracy: 0.3636
Epoch 48/400
164/164 [==============================] - 0s 85us/step - loss: 0.4546 - accuracy: 0.7866 - val_loss: 0.9487 - val_accuracy: 0.3636
Epoch 49/400
164/164 [==============================] - 0s 98us/step - loss: 0.4540 - accuracy: 0.7866 - val_loss: 0.9498 - val_accuracy: 0.3636
Epoch 50/400
164/164 [==============================] - 0s 122us/step - loss: 0.4530 - accuracy: 0.7927 - val_loss: 0.9504 - val_accuracy: 0.3818
Epoch 51/400
164/164 [==============================] - 0s 110us/step - loss: 0.4523 - accuracy: 0.7927 - val_loss: 0.9492 - val_accuracy: 0.3818
Epoch 52/400
164/164 [==============================] - 0s 110us/step - loss: 0.4519 - accuracy: 0.7866 - val_loss: 0.9489 - val_accuracy: 0.3818

Epoch 00052: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05.
Epoch 53/400
164/164 [==============================] - 0s 91us/step - loss: 0.4513 - accuracy: 0.7805 - val_loss: 0.9485 - val_accuracy: 0.3818
Epoch 54/400
164/164 [==============================] - 0s 104us/step - loss: 0.4507 - accuracy: 0.7805 - val_loss: 0.9486 - val_accuracy: 0.3818
Epoch 55/400
164/164 [==============================] - 0s 85us/step - loss: 0.4503 - accuracy: 0.7805 - val_loss: 0.9493 - val_accuracy: 0.3818
Epoch 56/400
164/164 [==============================] - 0s 79us/step - loss: 0.4499 - accuracy: 0.7805 - val_loss: 0.9502 - val_accuracy: 0.3818
Epoch 57/400
164/164 [==============================] - 0s 85us/step - loss: 0.4495 - accuracy: 0.7866 - val_loss: 0.9506 - val_accuracy: 0.3818
Epoch 58/400
164/164 [==============================] - 0s 91us/step - loss: 0.4490 - accuracy: 0.7866 - val_loss: 0.9514 - val_accuracy: 0.3818
Epoch 59/400
164/164 [==============================] - 0s 79us/step - loss: 0.4486 - accuracy: 0.7866 - val_loss: 0.9518 - val_accuracy: 0.3818
Epoch 60/400
164/164 [==============================] - 0s 79us/step - loss: 0.4482 - accuracy: 0.7866 - val_loss: 0.9530 - val_accuracy: 0.4000
Epoch 61/400
164/164 [==============================] - 0s 79us/step - loss: 0.4477 - accuracy: 0.7866 - val_loss: 0.9536 - val_accuracy: 0.4000
Epoch 62/400
164/164 [==============================] - 0s 122us/step - loss: 0.4474 - accuracy: 0.7927 - val_loss: 0.9528 - val_accuracy: 0.3818

Epoch 00062: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05.
Epoch 63/400
164/164 [==============================] - 0s 104us/step - loss: 0.4470 - accuracy: 0.7927 - val_loss: 0.9529 - val_accuracy: 0.3818
Epoch 64/400
164/164 [==============================] - 0s 110us/step - loss: 0.4467 - accuracy: 0.7927 - val_loss: 0.9535 - val_accuracy: 0.3818
Epoch 65/400
164/164 [==============================] - 0s 98us/step - loss: 0.4465 - accuracy: 0.7927 - val_loss: 0.9538 - val_accuracy: 0.3818
Epoch 66/400
164/164 [==============================] - 0s 85us/step - loss: 0.4463 - accuracy: 0.7988 - val_loss: 0.9538 - val_accuracy: 0.4000
Epoch 67/400
164/164 [==============================] - 0s 98us/step - loss: 0.4461 - accuracy: 0.8049 - val_loss: 0.9538 - val_accuracy: 0.4000
Epoch 68/400
164/164 [==============================] - 0s 91us/step - loss: 0.4459 - accuracy: 0.7988 - val_loss: 0.9535 - val_accuracy: 0.4000
Epoch 69/400
164/164 [==============================] - 0s 98us/step - loss: 0.4457 - accuracy: 0.7988 - val_loss: 0.9531 - val_accuracy: 0.4000
Epoch 70/400
164/164 [==============================] - 0s 98us/step - loss: 0.4455 - accuracy: 0.7988 - val_loss: 0.9530 - val_accuracy: 0.4000
Epoch 71/400
164/164 [==============================] - 0s 91us/step - loss: 0.4453 - accuracy: 0.7988 - val_loss: 0.9528 - val_accuracy: 0.4000
Epoch 72/400
164/164 [==============================] - 0s 79us/step - loss: 0.4451 - accuracy: 0.7988 - val_loss: 0.9526 - val_accuracy: 0.4000

Epoch 00072: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05.
Epoch 73/400
164/164 [==============================] - 0s 79us/step - loss: 0.4449 - accuracy: 0.7988 - val_loss: 0.9525 - val_accuracy: 0.4000
Epoch 74/400
164/164 [==============================] - 0s 79us/step - loss: 0.4448 - accuracy: 0.7988 - val_loss: 0.9525 - val_accuracy: 0.4000
Epoch 75/400
164/164 [==============================] - 0s 91us/step - loss: 0.4447 - accuracy: 0.7988 - val_loss: 0.9525 - val_accuracy: 0.4000
Epoch 76/400
164/164 [==============================] - 0s 85us/step - loss: 0.4447 - accuracy: 0.7988 - val_loss: 0.9525 - val_accuracy: 0.4000
Epoch 77/400
164/164 [==============================] - 0s 91us/step - loss: 0.4446 - accuracy: 0.7988 - val_loss: 0.9526 - val_accuracy: 0.4000
Epoch 78/400
164/164 [==============================] - 0s 91us/step - loss: 0.4445 - accuracy: 0.7988 - val_loss: 0.9527 - val_accuracy: 0.4000
Epoch 79/400
164/164 [==============================] - 0s 91us/step - loss: 0.4444 - accuracy: 0.7988 - val_loss: 0.9528 - val_accuracy: 0.4182
Epoch 80/400
164/164 [==============================] - 0s 85us/step - loss: 0.4443 - accuracy: 0.7988 - val_loss: 0.9529 - val_accuracy: 0.4000
Epoch 81/400
164/164 [==============================] - 0s 98us/step - loss: 0.4442 - accuracy: 0.7988 - val_loss: 0.9531 - val_accuracy: 0.4000
Epoch 82/400
164/164 [==============================] - 0s 91us/step - loss: 0.4441 - accuracy: 0.7988 - val_loss: 0.9533 - val_accuracy: 0.4000

Epoch 00082: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05.
Epoch 83/400
164/164 [==============================] - 0s 85us/step - loss: 0.4440 - accuracy: 0.7988 - val_loss: 0.9533 - val_accuracy: 0.4182
Epoch 84/400
164/164 [==============================] - 0s 79us/step - loss: 0.4439 - accuracy: 0.7988 - val_loss: 0.9534 - val_accuracy: 0.4182
Epoch 85/400
164/164 [==============================] - 0s 85us/step - loss: 0.4439 - accuracy: 0.7988 - val_loss: 0.9536 - val_accuracy: 0.4182
Epoch 86/400
164/164 [==============================] - 0s 79us/step - loss: 0.4438 - accuracy: 0.8049 - val_loss: 0.9537 - val_accuracy: 0.4182
Epoch 87/400
164/164 [==============================] - 0s 91us/step - loss: 0.4438 - accuracy: 0.8049 - val_loss: 0.9538 - val_accuracy: 0.4182
Epoch 88/400
164/164 [==============================] - 0s 98us/step - loss: 0.4437 - accuracy: 0.8049 - val_loss: 0.9536 - val_accuracy: 0.4182
Epoch 89/400
164/164 [==============================] - 0s 79us/step - loss: 0.4437 - accuracy: 0.8049 - val_loss: 0.9535 - val_accuracy: 0.4182
Epoch 90/400
164/164 [==============================] - 0s 73us/step - loss: 0.4436 - accuracy: 0.8049 - val_loss: 0.9534 - val_accuracy: 0.4182
Epoch 91/400
164/164 [==============================] - 0s 79us/step - loss: 0.4436 - accuracy: 0.8049 - val_loss: 0.9535 - val_accuracy: 0.4182
Epoch 92/400
164/164 [==============================] - 0s 73us/step - loss: 0.4435 - accuracy: 0.8049 - val_loss: 0.9535 - val_accuracy: 0.4182

Epoch 00092: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06.
Epoch 93/400
164/164 [==============================] - 0s 140us/step - loss: 0.4435 - accuracy: 0.8049 - val_loss: 0.9536 - val_accuracy: 0.4182
Epoch 94/400
164/164 [==============================] - 0s 104us/step - loss: 0.4435 - accuracy: 0.8049 - val_loss: 0.9536 - val_accuracy: 0.4182
Epoch 95/400
164/164 [==============================] - 0s 98us/step - loss: 0.4434 - accuracy: 0.8049 - val_loss: 0.9536 - val_accuracy: 0.4182
Epoch 96/400
164/164 [==============================] - 0s 104us/step - loss: 0.4434 - accuracy: 0.8049 - val_loss: 0.9537 - val_accuracy: 0.4182
Epoch 97/400
164/164 [==============================] - 0s 116us/step - loss: 0.4434 - accuracy: 0.8049 - val_loss: 0.9538 - val_accuracy: 0.4182
Epoch 98/400
164/164 [==============================] - 0s 98us/step - loss: 0.4434 - accuracy: 0.8049 - val_loss: 0.9538 - val_accuracy: 0.4182
Epoch 99/400
164/164 [==============================] - 0s 98us/step - loss: 0.4433 - accuracy: 0.8049 - val_loss: 0.9539 - val_accuracy: 0.4182
Epoch 100/400
164/164 [==============================] - 0s 104us/step - loss: 0.4433 - accuracy: 0.8049 - val_loss: 0.9540 - val_accuracy: 0.4182
Epoch 101/400
164/164 [==============================] - 0s 122us/step - loss: 0.4433 - accuracy: 0.8049 - val_loss: 0.9540 - val_accuracy: 0.4182
Epoch 102/400
164/164 [==============================] - 0s 97us/step - loss: 0.4433 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182

Epoch 00102: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06.
Epoch 103/400
164/164 [==============================] - 0s 85us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 104/400
164/164 [==============================] - 0s 91us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 105/400
164/164 [==============================] - 0s 98us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 106/400
164/164 [==============================] - 0s 85us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 107/400
164/164 [==============================] - 0s 91us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 108/400
164/164 [==============================] - 0s 91us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 109/400
164/164 [==============================] - 0s 98us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 110/400
164/164 [==============================] - 0s 85us/step - loss: 0.4432 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 111/400
164/164 [==============================] - 0s 79us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 112/400
164/164 [==============================] - 0s 98us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00112: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06.
Epoch 113/400
164/164 [==============================] - 0s 104us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 114/400
164/164 [==============================] - 0s 110us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 115/400
164/164 [==============================] - 0s 110us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 116/400
164/164 [==============================] - 0s 91us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 117/400
164/164 [==============================] - 0s 79us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 118/400
164/164 [==============================] - 0s 91us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 119/400
164/164 [==============================] - 0s 85us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 120/400
164/164 [==============================] - 0s 98us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 121/400
164/164 [==============================] - 0s 85us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 122/400
164/164 [==============================] - 0s 79us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182

Epoch 00122: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07.
Epoch 123/400
164/164 [==============================] - 0s 85us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 124/400
164/164 [==============================] - 0s 79us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 125/400
164/164 [==============================] - 0s 98us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 126/400
164/164 [==============================] - 0s 85us/step - loss: 0.4431 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 127/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 128/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 129/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 130/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 131/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 132/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182

Epoch 00132: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07.
Epoch 133/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 134/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 135/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 136/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 137/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 138/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 139/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 140/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 141/400
164/164 [==============================] - 0s 122us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9541 - val_accuracy: 0.4182
Epoch 142/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00142: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07.
Epoch 143/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 144/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 145/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 146/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 147/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 148/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 149/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 150/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 151/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 152/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00152: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08.
Epoch 153/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 154/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 155/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 156/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 157/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 158/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 159/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 160/400
164/164 [==============================] - 0s 97us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 161/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 162/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00162: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08.
Epoch 163/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 164/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 165/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 166/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 167/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 168/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 169/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 170/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 171/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 172/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00172: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08.
Epoch 173/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 174/400
164/164 [==============================] - 0s 122us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 175/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 176/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 177/400
164/164 [==============================] - 0s 110us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 178/400
164/164 [==============================] - 0s 97us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 179/400
164/164 [==============================] - 0s 97us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 180/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 181/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 182/400
164/164 [==============================] - 0s 61us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00182: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08.
Epoch 183/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 184/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 185/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 186/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 187/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 188/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 189/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 190/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 191/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 192/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00192: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09.
Epoch 193/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 194/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 195/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 196/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 197/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 198/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 199/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 200/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 201/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 202/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00202: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09.
Epoch 203/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 204/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 205/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 206/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 207/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 208/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 209/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 210/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 211/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 212/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00212: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09.
Epoch 213/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 214/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 215/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 216/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 217/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 218/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 219/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 220/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 221/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 222/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00222: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10.
Epoch 223/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 224/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 225/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 226/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 227/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 228/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 229/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 230/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 231/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 232/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00232: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10.
Epoch 233/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 234/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 235/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 236/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 237/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 238/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 239/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 240/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 241/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 242/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00242: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10.
Epoch 243/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 244/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 245/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 246/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 247/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 248/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 249/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 250/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 251/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 252/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00252: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11.
Epoch 253/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 254/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 255/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 256/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 257/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 258/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 259/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 260/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 261/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 262/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00262: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11.
Epoch 263/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 264/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 265/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 266/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 267/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 268/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 269/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 270/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 271/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 272/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00272: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11.
Epoch 273/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 274/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 275/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 276/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 277/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 278/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 279/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 280/400
164/164 [==============================] - 0s 104us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 281/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 282/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00282: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11.
Epoch 283/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 284/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 285/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 286/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 287/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 288/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 289/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 290/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 291/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 292/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00292: ReduceLROnPlateau reducing learning rate to 5.5879354962651284e-12.
Epoch 293/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 294/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 295/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 296/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 297/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 298/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 299/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 300/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 301/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 302/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00302: ReduceLROnPlateau reducing learning rate to 2.7939677481325642e-12.
Epoch 303/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 304/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 305/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 306/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 307/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 308/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 309/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 310/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 311/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 312/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00312: ReduceLROnPlateau reducing learning rate to 1.3969838740662821e-12.
Epoch 313/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 314/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 315/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 316/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 317/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 318/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 319/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 320/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 321/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 322/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00322: ReduceLROnPlateau reducing learning rate to 6.984919370331411e-13.
Epoch 323/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 324/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 325/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 326/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 327/400
164/164 [==============================] - ETA: 0s - loss: 0.4129 - accuracy: 0.87 - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 328/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 329/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 330/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 331/400
164/164 [==============================] - 0s 104us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 332/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00332: ReduceLROnPlateau reducing learning rate to 3.4924596851657053e-13.
Epoch 333/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 334/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 335/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 336/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 337/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 338/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 339/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 340/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 341/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 342/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00342: ReduceLROnPlateau reducing learning rate to 1.7462298425828526e-13.
Epoch 343/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 344/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 345/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 346/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 347/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 348/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 349/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 350/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 351/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 352/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00352: ReduceLROnPlateau reducing learning rate to 8.731149212914263e-14.
Epoch 353/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 354/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 355/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 356/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 357/400
164/164 [==============================] - 0s 61us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 358/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 359/400
164/164 [==============================] - 0s 61us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 360/400
164/164 [==============================] - 0s 61us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 361/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 362/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00362: ReduceLROnPlateau reducing learning rate to 4.3655746064571316e-14.
Epoch 363/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 364/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 365/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 366/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 367/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 368/400
164/164 [==============================] - 0s 61us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 369/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 370/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 371/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 372/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00372: ReduceLROnPlateau reducing learning rate to 2.1827873032285658e-14.
Epoch 373/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 374/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 375/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 376/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 377/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 378/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 379/400
164/164 [==============================] - 0s 85us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 380/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 381/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 382/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00382: ReduceLROnPlateau reducing learning rate to 1.0913936516142829e-14.
Epoch 383/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 384/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 385/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 386/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 387/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 388/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 389/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 390/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 391/400
164/164 [==============================] - 0s 79us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 392/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182

Epoch 00392: ReduceLROnPlateau reducing learning rate to 5.4569682580714145e-15.
Epoch 393/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 394/400
164/164 [==============================] - 0s 67us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 395/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 396/400
164/164 [==============================] - 0s 110us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 397/400
164/164 [==============================] - 0s 98us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 398/400
164/164 [==============================] - 0s 91us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 399/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
Epoch 400/400
164/164 [==============================] - 0s 73us/step - loss: 0.4430 - accuracy: 0.8049 - val_loss: 0.9542 - val_accuracy: 0.4182
In [292]:
# Plot the training history: accuracy and loss curves, training vs. validation.
# (Removed a leftover debug `print(epochs)` that dumped a bare range object.)
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One point per epoch; len(acc) == number of epochs actually run.
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 400)
In [293]:
# Final evaluation of the trained model on the held-out test split.
# Keras `evaluate` returns [loss, *metrics]; here that is (loss, accuracy).
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
55/55 [==============================] - 0s 55us/step
test loss: 0.9541770685802806, test accuracy: 0.41818180680274963
In [294]:
# Predicted scores on the test set; ROC AUC is threshold-independent,
# so we pass the raw model outputs rather than binarized labels.
y_pred = model.predict(X_test)
auc_roc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc_roc)
AUC ROC:  0.4260752688172043
In [295]:
# Binarize the predicted scores at the 0.5 threshold, then compute Cohen's kappa
# against the true labels.
# NOTE(review): this cell rebinds y_pred in terms of itself, so it is not
# idempotent — it must run exactly once after the predict cell above.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  -0.13989637305699487

KMeans clustering of the MFCC features

In [149]:
X
Out[149]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13
0 0.992062 -0.477172 -1.079451 -2.369470 -1.705431 -0.098594 -0.281836 -1.432001 -0.898623 0.130446 -0.024683 -0.312128 0.020392
1 0.843575 -0.507672 -0.731713 -0.334904 1.442336 -0.491141 -0.266416 -0.511246 1.004414 0.558777 0.127114 -1.667555 0.835458
2 0.816922 -0.263544 0.639646 -0.865417 1.276602 -0.245238 0.106722 -0.761365 -0.170481 -1.443667 -0.451102 1.196430 -0.037846
3 4.368525 0.851784 -0.671158 -0.128467 2.141169 -0.472725 -1.437233 -1.858760 1.581800 -0.145852 0.107228 1.458238 1.666081
4 0.001312 0.535305 -0.648296 0.221414 0.549478 0.736878 -0.439538 -0.138787 0.584258 0.095671 1.901833 2.909252 1.802578
5 -0.236754 0.488978 0.203743 0.088401 -0.151814 0.811707 -0.092973 0.153518 -0.936863 0.354100 0.123352 1.318569 1.097711
6 -0.842496 0.742173 0.068601 1.394492 -0.276167 1.301853 0.336343 1.077540 -1.118983 1.688235 -0.103661 1.224883 0.350956
7 -0.952702 1.078642 -0.563379 -0.018149 -0.073042 -0.591301 -1.392389 0.209234 0.725065 0.064350 0.034449 0.581953 2.151966
8 0.046457 -0.093025 -0.804385 0.542662 -0.130939 0.042792 1.198959 -0.559116 0.017192 -0.249308 0.747851 -0.035599 0.995166
9 -0.781158 0.099463 0.196737 2.462131 0.316140 -0.369698 2.196715 -0.800443 2.137687 1.438443 0.055279 -0.284437 1.702942
10 -0.906167 0.568017 0.700382 2.876646 -0.809125 -0.491839 1.801564 -2.406947 1.939246 1.397556 0.709408 -0.423394 1.773713
11 1.172687 1.292213 -0.402038 0.087342 0.324539 0.973336 -0.548282 0.781195 0.846038 0.464514 -1.030463 -0.559243 0.168727
12 0.367875 1.949889 0.516382 0.657124 -0.534306 0.575187 -0.750861 0.247200 -0.232297 0.332174 -0.426787 0.318763 0.083316
13 1.270520 1.194102 0.267933 0.676186 0.394734 -0.709975 -0.047626 1.113385 0.339962 0.424937 -0.528480 0.671225 0.078062
14 -0.095931 0.792392 0.626113 0.189989 0.315198 -0.175744 0.011713 -0.072196 0.742338 0.974567 0.935685 0.083454 0.970157
15 -0.322645 0.977766 0.685697 0.670670 0.997903 0.619018 0.498110 -0.016728 0.445370 -0.102204 0.199517 -0.315303 0.347920
16 0.565974 0.440551 0.402995 1.815814 1.906139 1.105013 1.256180 0.907086 0.592851 -0.159427 1.013051 -0.620202 1.259932
17 -0.863540 0.887127 1.387720 -0.082168 -0.694633 -0.810037 1.251697 -0.443532 0.307506 0.253798 -0.292483 0.030812 0.176350
18 -0.822258 -0.630193 -0.672294 -0.279417 -0.731983 -1.510167 -1.393705 -0.161872 0.722297 0.910604 -0.610303 0.380547 1.296315
19 -0.889164 0.641922 2.278761 0.190213 -0.341231 -0.624107 1.228820 -0.549441 -0.662942 0.481866 -0.541347 -1.061735 -0.122227
20 0.795964 0.484784 0.898919 0.027625 0.415359 0.271286 0.366966 -0.498975 0.300352 0.216702 0.361195 -0.771976 0.085971
21 0.168183 -0.077353 1.019887 -0.637065 0.731534 0.877245 1.225125 -0.566997 -0.452222 -1.105384 0.185636 -0.782808 -0.224975
22 0.510023 -0.099060 0.064384 -0.039933 0.786951 0.119530 -0.259052 -0.881354 -0.113425 1.191274 0.335443 -0.189618 -0.337688
23 0.216210 -0.069447 0.974822 -0.626273 0.835854 0.914236 1.226463 -0.369525 -0.398299 -1.146613 0.026274 -0.944475 -0.192948
24 -0.239273 -0.518568 -0.127834 0.045011 0.403223 0.368253 -0.584902 -0.905436 -0.405699 0.129383 0.809611 -0.174138 -0.115393
25 -1.241907 1.355534 -0.693470 0.793789 0.606007 0.930263 0.009323 -0.712463 0.037916 -0.182143 1.212760 -0.083882 0.639662
26 -0.847436 1.180146 -0.489592 1.189572 -0.457645 -0.163979 -0.010812 -0.765561 -0.347488 -0.216575 0.804302 -0.236378 0.481212
27 -0.378383 1.017722 -1.812001 0.443514 0.583209 1.709730 0.715521 -0.076610 0.416120 0.013436 0.420025 -0.925263 0.626400
28 0.245370 1.187084 1.056929 2.013063 -0.505622 1.228583 -1.158143 0.622932 0.113512 0.948397 0.008252 1.035839 -0.691702
29 -0.623386 1.368898 1.216933 1.961377 0.744541 1.555516 -1.205283 -0.252995 -0.325624 0.538668 0.197646 0.356450 -0.219812
... ... ... ... ... ... ... ... ... ... ... ... ... ...
189 -0.565077 0.809784 0.557457 0.815038 0.823053 -0.931359 -0.039244 -0.199068 0.083690 -0.235063 -0.030800 -0.564557 -0.253507
190 -0.602848 0.638838 0.763481 -0.424641 -0.810302 -0.951734 -0.732024 -0.504038 0.379372 0.748895 -0.593820 -0.772491 0.175752
191 -1.094031 -0.896961 0.400325 -1.635971 -1.099938 -1.091799 -0.593281 0.890889 0.984647 0.584509 0.318496 0.175062 -0.783524
192 -0.348357 0.944340 0.239675 0.003612 -1.370450 -0.996597 -0.616405 0.161481 -0.258760 0.534721 -0.431338 0.376456 -1.623026
193 2.110671 -1.005236 0.268022 0.459390 -1.985350 0.405677 -0.361571 -1.272053 -0.873345 2.111218 -0.246708 0.798456 1.067252
194 1.222194 -1.600122 -1.149302 0.230839 -0.213026 -1.572114 0.486447 -0.770701 0.244895 2.689114 -2.296486 0.718338 -1.220356
195 -0.509789 -0.757711 0.189267 0.516644 0.750906 -1.485714 2.485824 -1.204754 -3.373113 -0.450016 -1.091178 -0.474728 -0.522197
196 0.194175 -0.618441 -1.090420 0.233017 -1.492602 -0.342192 -1.612833 0.714990 0.072755 -0.026932 0.464029 0.212333 1.204262
197 0.297635 -0.727616 -1.927078 -0.145347 -0.990256 0.052935 -1.791108 -0.351333 -0.064903 0.201842 1.581215 1.084453 -0.168841
198 -0.271030 -0.575137 -1.005334 -0.238705 -0.931830 -1.319114 -0.668613 0.510822 0.209623 0.487577 0.154874 0.133768 1.259548
199 0.059096 -0.370313 -0.760047 0.706270 -2.488266 -1.336692 -0.683584 0.436366 -0.150281 -0.711308 -0.851205 0.253942 -0.052516
200 0.147539 -0.233608 -0.578016 0.870637 -2.418094 -1.286070 -0.692623 0.342693 0.015890 -0.795418 -1.221248 0.309493 -0.526480
201 -0.076214 -1.055629 0.159389 -0.403318 -0.111273 -1.325990 -0.867502 0.519381 0.192007 -0.024629 0.220420 0.551046 0.399728
202 1.468986 0.518464 1.475456 -1.400891 0.408186 -1.831201 1.474742 0.566660 -0.403197 -1.295176 -0.443787 -1.884346 -1.993491
203 -1.739107 0.192104 -0.670709 -1.236237 -1.672915 -0.680127 0.027148 0.524909 1.865754 -0.634310 -0.607429 -1.471191 -0.632982
204 -0.663868 -0.862566 -0.329803 -0.857680 0.167824 -0.013328 0.176565 0.125832 0.609671 -1.296827 -0.435986 -1.341223 -0.977207
205 -0.739818 -0.668220 -0.077479 0.026286 0.027801 0.040659 -0.161646 -1.046948 -1.248976 -0.449243 1.046834 1.381194 1.646325
206 0.475752 0.695473 -0.072097 1.081397 -0.366985 -2.008080 0.515734 0.005330 1.193800 -0.841825 -2.650200 -3.862624 -2.115507
207 -1.331365 -1.632552 -0.876636 0.076190 1.187799 1.138590 1.235955 1.583447 0.890342 -1.587964 0.546109 1.565567 1.756993
208 -0.397476 0.090963 1.217996 0.773741 1.107204 -1.125870 -0.915396 -1.130561 -1.914456 -0.664474 -0.226576 0.112420 0.235011
209 -0.465823 -1.372705 -0.445436 0.316510 -1.492946 -1.103783 0.353513 -0.311377 -1.095388 -0.615078 -0.585868 0.172807 -0.860564
210 -0.594535 -1.761364 -1.069906 -0.502969 -1.411276 -0.906350 -0.559102 -1.240920 -2.254196 -1.206339 -0.528047 0.924112 0.472298
211 -1.022693 0.373374 -0.104205 -0.815628 -0.574733 0.906934 0.765114 -0.015386 0.110695 1.832325 0.712557 -0.951976 -0.678869
212 -0.967902 0.155275 0.013938 -0.549105 -0.907792 0.881907 0.609589 -0.135010 -0.373473 1.152134 0.386511 -0.744687 -0.447017
213 -1.238242 -0.062983 -0.133082 -0.158458 -0.338086 -0.411874 0.964537 0.870379 0.530337 0.858339 0.489332 -1.190977 -1.340484
214 0.349761 -1.391267 -3.069473 0.840195 1.044391 -1.052018 1.004856 1.478511 1.210060 -1.145325 2.653757 1.937234 0.592139
215 0.782819 -1.300386 -0.487318 0.850960 -2.046427 1.050631 0.289069 2.400271 2.707288 -0.278238 0.152360 1.912210 -0.208225
216 1.847553 -1.059174 -0.808403 0.400706 -0.275009 0.409744 -0.141885 0.706348 0.476002 0.990111 -0.168504 0.856440 -0.395652
217 2.608478 0.174234 2.534211 -0.985597 -0.436400 3.751943 1.560179 -2.367095 1.272529 2.464209 -0.954336 0.310720 -1.209456
218 -0.069569 0.418008 -0.004324 1.330358 0.365352 -0.582788 -0.527444 -0.298114 -0.353021 -1.118883 -0.459230 -0.986241 -0.041010

219 rows × 13 columns

In [150]:
# Elbow method: fit KMeans for k = 1..14 and record the within-cluster
# sum of squares (inertia) for each k.
WSSs = []
for n_clusters in range(1, 15):
    kmeans_model = KMeans(n_clusters=n_clusters, random_state=0)
    kmeans_model.fit(X)
    WSSs.append(kmeans_model.inertia_)
WSSs
Out[150]:
[2847.0,
 2572.5760570812117,
 2370.209947155015,
 2235.6129406180157,
 2112.951551625758,
 2041.1809211260454,
 1982.3615393500422,
 1899.0667595696164,
 1851.9267246215204,
 1760.4468946465518,
 1745.79714786859,
 1689.1350809615656,
 1657.4940102564742,
 1625.370413913055]
In [151]:
# Elbow plot: WSS against the candidate cluster counts; the curve flattens
# early, motivating the choice of K = 3 noted below.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
Out[151]:
[<matplotlib.lines.Line2D at 0x1e82d2ffda0>]

From the elbow in the WSS curve we choose K = 3.

In [152]:
# Fit the final KMeans model with K = 3 (chosen from the elbow plot above);
# fixed random_state and n_init=10 restarts keep the clustering reproducible.
kmeans_mfcc = KMeans(n_clusters=3, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
Out[152]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [153]:
# Cluster assignment (0, 1 or 2) for each of the 219 samples.
kmeans_mfcc.labels_
Out[153]:
array([1, 2, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 2,
       1, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0,
       0, 0, 1, 1, 2, 0, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 0, 0, 1, 1,
       1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 2,
       0, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 0, 1, 0, 2, 2, 2, 2, 1, 2, 2, 2,
       2, 0, 1, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
       0, 0, 1, 0, 2, 1, 1, 1, 2, 0, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 2,
       1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 2, 2, 0, 2,
       2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 1, 2, 1, 1, 1, 2, 1, 1,
       1, 1, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 0, 0, 1, 0, 1])
In [154]:
# Predict clusters for X; since X is the same data the model was fitted on,
# this is identical to kmeans_mfcc.labels_ (compare Out[153] and Out[154]).
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
Out[154]:
array([1, 2, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 2,
       1, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0,
       0, 0, 1, 1, 2, 0, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 0, 0, 1, 1,
       1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 2,
       0, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 0, 1, 0, 2, 2, 2, 2, 1, 2, 2, 2,
       2, 0, 1, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
       0, 0, 1, 0, 2, 1, 1, 1, 2, 0, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 2,
       1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 2, 2, 0, 2,
       2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 1, 2, 1, 1, 1, 2, 1, 1,
       1, 1, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 0, 0, 1, 0, 1])
In [155]:
# Attach the cluster id and the target label as extra columns on X.
# NOTE(review): this mutates X in place — after this cell X has 15 columns,
# so earlier cells expecting the 13-column feature matrix are stale on re-run.
X.loc[:,'Cluster'] = clusters_mfcc
X.loc[:,'chosen'] = list(y)
In [156]:
X
Out[156]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13 Cluster chosen
0 0.992062 -0.477172 -1.079451 -2.369470 -1.705431 -0.098594 -0.281836 -1.432001 -0.898623 0.130446 -0.024683 -0.312128 0.020392 1 0
1 0.843575 -0.507672 -0.731713 -0.334904 1.442336 -0.491141 -0.266416 -0.511246 1.004414 0.558777 0.127114 -1.667555 0.835458 2 0
2 0.816922 -0.263544 0.639646 -0.865417 1.276602 -0.245238 0.106722 -0.761365 -0.170481 -1.443667 -0.451102 1.196430 -0.037846 1 0
3 4.368525 0.851784 -0.671158 -0.128467 2.141169 -0.472725 -1.437233 -1.858760 1.581800 -0.145852 0.107228 1.458238 1.666081 1 0
4 0.001312 0.535305 -0.648296 0.221414 0.549478 0.736878 -0.439538 -0.138787 0.584258 0.095671 1.901833 2.909252 1.802578 0 0
5 -0.236754 0.488978 0.203743 0.088401 -0.151814 0.811707 -0.092973 0.153518 -0.936863 0.354100 0.123352 1.318569 1.097711 0 0
6 -0.842496 0.742173 0.068601 1.394492 -0.276167 1.301853 0.336343 1.077540 -1.118983 1.688235 -0.103661 1.224883 0.350956 0 0
7 -0.952702 1.078642 -0.563379 -0.018149 -0.073042 -0.591301 -1.392389 0.209234 0.725065 0.064350 0.034449 0.581953 2.151966 1 0
8 0.046457 -0.093025 -0.804385 0.542662 -0.130939 0.042792 1.198959 -0.559116 0.017192 -0.249308 0.747851 -0.035599 0.995166 0 0
9 -0.781158 0.099463 0.196737 2.462131 0.316140 -0.369698 2.196715 -0.800443 2.137687 1.438443 0.055279 -0.284437 1.702942 0 0
10 -0.906167 0.568017 0.700382 2.876646 -0.809125 -0.491839 1.801564 -2.406947 1.939246 1.397556 0.709408 -0.423394 1.773713 0 0
11 1.172687 1.292213 -0.402038 0.087342 0.324539 0.973336 -0.548282 0.781195 0.846038 0.464514 -1.030463 -0.559243 0.168727 0 0
12 0.367875 1.949889 0.516382 0.657124 -0.534306 0.575187 -0.750861 0.247200 -0.232297 0.332174 -0.426787 0.318763 0.083316 0 0
13 1.270520 1.194102 0.267933 0.676186 0.394734 -0.709975 -0.047626 1.113385 0.339962 0.424937 -0.528480 0.671225 0.078062 0 0
14 -0.095931 0.792392 0.626113 0.189989 0.315198 -0.175744 0.011713 -0.072196 0.742338 0.974567 0.935685 0.083454 0.970157 0 0
15 -0.322645 0.977766 0.685697 0.670670 0.997903 0.619018 0.498110 -0.016728 0.445370 -0.102204 0.199517 -0.315303 0.347920 0 0
16 0.565974 0.440551 0.402995 1.815814 1.906139 1.105013 1.256180 0.907086 0.592851 -0.159427 1.013051 -0.620202 1.259932 0 0
17 -0.863540 0.887127 1.387720 -0.082168 -0.694633 -0.810037 1.251697 -0.443532 0.307506 0.253798 -0.292483 0.030812 0.176350 0 0
18 -0.822258 -0.630193 -0.672294 -0.279417 -0.731983 -1.510167 -1.393705 -0.161872 0.722297 0.910604 -0.610303 0.380547 1.296315 1 0
19 -0.889164 0.641922 2.278761 0.190213 -0.341231 -0.624107 1.228820 -0.549441 -0.662942 0.481866 -0.541347 -1.061735 -0.122227 2 0
20 0.795964 0.484784 0.898919 0.027625 0.415359 0.271286 0.366966 -0.498975 0.300352 0.216702 0.361195 -0.771976 0.085971 0 0
21 0.168183 -0.077353 1.019887 -0.637065 0.731534 0.877245 1.225125 -0.566997 -0.452222 -1.105384 0.185636 -0.782808 -0.224975 2 0
22 0.510023 -0.099060 0.064384 -0.039933 0.786951 0.119530 -0.259052 -0.881354 -0.113425 1.191274 0.335443 -0.189618 -0.337688 1 0
23 0.216210 -0.069447 0.974822 -0.626273 0.835854 0.914236 1.226463 -0.369525 -0.398299 -1.146613 0.026274 -0.944475 -0.192948 2 0
24 -0.239273 -0.518568 -0.127834 0.045011 0.403223 0.368253 -0.584902 -0.905436 -0.405699 0.129383 0.809611 -0.174138 -0.115393 1 0
25 -1.241907 1.355534 -0.693470 0.793789 0.606007 0.930263 0.009323 -0.712463 0.037916 -0.182143 1.212760 -0.083882 0.639662 0 0
26 -0.847436 1.180146 -0.489592 1.189572 -0.457645 -0.163979 -0.010812 -0.765561 -0.347488 -0.216575 0.804302 -0.236378 0.481212 0 0
27 -0.378383 1.017722 -1.812001 0.443514 0.583209 1.709730 0.715521 -0.076610 0.416120 0.013436 0.420025 -0.925263 0.626400 0 0
28 0.245370 1.187084 1.056929 2.013063 -0.505622 1.228583 -1.158143 0.622932 0.113512 0.948397 0.008252 1.035839 -0.691702 0 0
29 -0.623386 1.368898 1.216933 1.961377 0.744541 1.555516 -1.205283 -0.252995 -0.325624 0.538668 0.197646 0.356450 -0.219812 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
189 -0.565077 0.809784 0.557457 0.815038 0.823053 -0.931359 -0.039244 -0.199068 0.083690 -0.235063 -0.030800 -0.564557 -0.253507 0 1
190 -0.602848 0.638838 0.763481 -0.424641 -0.810302 -0.951734 -0.732024 -0.504038 0.379372 0.748895 -0.593820 -0.772491 0.175752 1 1
191 -1.094031 -0.896961 0.400325 -1.635971 -1.099938 -1.091799 -0.593281 0.890889 0.984647 0.584509 0.318496 0.175062 -0.783524 2 1
192 -0.348357 0.944340 0.239675 0.003612 -1.370450 -0.996597 -0.616405 0.161481 -0.258760 0.534721 -0.431338 0.376456 -1.623026 1 1
193 2.110671 -1.005236 0.268022 0.459390 -1.985350 0.405677 -0.361571 -1.272053 -0.873345 2.111218 -0.246708 0.798456 1.067252 1 1
194 1.222194 -1.600122 -1.149302 0.230839 -0.213026 -1.572114 0.486447 -0.770701 0.244895 2.689114 -2.296486 0.718338 -1.220356 1 1
195 -0.509789 -0.757711 0.189267 0.516644 0.750906 -1.485714 2.485824 -1.204754 -3.373113 -0.450016 -1.091178 -0.474728 -0.522197 2 1
196 0.194175 -0.618441 -1.090420 0.233017 -1.492602 -0.342192 -1.612833 0.714990 0.072755 -0.026932 0.464029 0.212333 1.204262 1 1
197 0.297635 -0.727616 -1.927078 -0.145347 -0.990256 0.052935 -1.791108 -0.351333 -0.064903 0.201842 1.581215 1.084453 -0.168841 1 1
198 -0.271030 -0.575137 -1.005334 -0.238705 -0.931830 -1.319114 -0.668613 0.510822 0.209623 0.487577 0.154874 0.133768 1.259548 1 1
199 0.059096 -0.370313 -0.760047 0.706270 -2.488266 -1.336692 -0.683584 0.436366 -0.150281 -0.711308 -0.851205 0.253942 -0.052516 1 1
200 0.147539 -0.233608 -0.578016 0.870637 -2.418094 -1.286070 -0.692623 0.342693 0.015890 -0.795418 -1.221248 0.309493 -0.526480 1 1
201 -0.076214 -1.055629 0.159389 -0.403318 -0.111273 -1.325990 -0.867502 0.519381 0.192007 -0.024629 0.220420 0.551046 0.399728 1 1
202 1.468986 0.518464 1.475456 -1.400891 0.408186 -1.831201 1.474742 0.566660 -0.403197 -1.295176 -0.443787 -1.884346 -1.993491 2 1
203 -1.739107 0.192104 -0.670709 -1.236237 -1.672915 -0.680127 0.027148 0.524909 1.865754 -0.634310 -0.607429 -1.471191 -0.632982 2 1
204 -0.663868 -0.862566 -0.329803 -0.857680 0.167824 -0.013328 0.176565 0.125832 0.609671 -1.296827 -0.435986 -1.341223 -0.977207 2 1
205 -0.739818 -0.668220 -0.077479 0.026286 0.027801 0.040659 -0.161646 -1.046948 -1.248976 -0.449243 1.046834 1.381194 1.646325 1 1
206 0.475752 0.695473 -0.072097 1.081397 -0.366985 -2.008080 0.515734 0.005330 1.193800 -0.841825 -2.650200 -3.862624 -2.115507 2 1
207 -1.331365 -1.632552 -0.876636 0.076190 1.187799 1.138590 1.235955 1.583447 0.890342 -1.587964 0.546109 1.565567 1.756993 0 1
208 -0.397476 0.090963 1.217996 0.773741 1.107204 -1.125870 -0.915396 -1.130561 -1.914456 -0.664474 -0.226576 0.112420 0.235011 1 1
209 -0.465823 -1.372705 -0.445436 0.316510 -1.492946 -1.103783 0.353513 -0.311377 -1.095388 -0.615078 -0.585868 0.172807 -0.860564 1 1
210 -0.594535 -1.761364 -1.069906 -0.502969 -1.411276 -0.906350 -0.559102 -1.240920 -2.254196 -1.206339 -0.528047 0.924112 0.472298 1 1
211 -1.022693 0.373374 -0.104205 -0.815628 -0.574733 0.906934 0.765114 -0.015386 0.110695 1.832325 0.712557 -0.951976 -0.678869 2 1
212 -0.967902 0.155275 0.013938 -0.549105 -0.907792 0.881907 0.609589 -0.135010 -0.373473 1.152134 0.386511 -0.744687 -0.447017 2 1
213 -1.238242 -0.062983 -0.133082 -0.158458 -0.338086 -0.411874 0.964537 0.870379 0.530337 0.858339 0.489332 -1.190977 -1.340484 2 1
214 0.349761 -1.391267 -3.069473 0.840195 1.044391 -1.052018 1.004856 1.478511 1.210060 -1.145325 2.653757 1.937234 0.592139 0 1
215 0.782819 -1.300386 -0.487318 0.850960 -2.046427 1.050631 0.289069 2.400271 2.707288 -0.278238 0.152360 1.912210 -0.208225 0 1
216 1.847553 -1.059174 -0.808403 0.400706 -0.275009 0.409744 -0.141885 0.706348 0.476002 0.990111 -0.168504 0.856440 -0.395652 1 1
217 2.608478 0.174234 2.534211 -0.985597 -0.436400 3.751943 1.560179 -2.367095 1.272529 2.464209 -0.954336 0.310720 -1.209456 0 1
218 -0.069569 0.418008 -0.004324 1.330358 0.365352 -0.582788 -0.527444 -0.298114 -0.353021 -1.118883 -0.459230 -0.986241 -0.041010 1 1

219 rows × 15 columns

In [157]:
# Cross-tabulate target ('chosen') vs. cluster and draw a stacked bar chart
# showing how the two classes distribute across the 3 clusters.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
# Pivot so each cluster is a row and the two 'chosen' values are columns.
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[157]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e82d379160>
In [252]:
# Render the current company's name as a Markdown section header.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[5]))

Urban Place

ANN

In [296]:
# Standardized MFCC feature matrix for company index 5 ("Urban Place").
X = df_n_ps_std_mfcc[5]
In [297]:
# Binary target column — presumably 1 means the track was chosen for the
# playlist; confirm against the dataset's documentation.
y = df_n_ps[5]['chosen']
In [298]:
# Hold out a test set (sklearn's default 75/25 split).
# NOTE(review): no random_state is set, so the split — and every metric
# reported below — changes on each re-run; consider passing random_state.
X_train, X_test, y_train, y_test = train_test_split(X, y)
In [299]:
# Sanity check: 168 training samples x 13 MFCC features.
X_train.shape
Out[299]:
(168, 13)
In [163]:
# Base estimator for the grid search; hidden_layer_sizes here is only a
# placeholder — the parameter grid below overrides it.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [164]:
# Candidate hyper-parameter values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]  # defined but excluded from the grid below
In [165]:
# Build the grid search (5-fold CV). Timing starts here so the fit cell
# below can report total wall-clock duration.
import time
start = time.time() # current time in seconds since Jan 1st 1970 (epoch reference)

np.random.seed(1234)
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track both Cohen's kappa and plain accuracy; the best model is refit on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` parameter was removed in scikit-learn 0.24, so this
# cell requires an older sklearn version.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [166]:
# Run the exhaustive search, then report the winning configuration with its
# CV accuracy and kappa, and the total search time in minutes.
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # time right after the model search finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'logistic', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.009, 'max_iter': 1000}, que permiten obtener un Accuracy de 73.81% y un Kappa del 36.33
Tiempo total: 24.57 minutos
In [300]:
# Recover the architecture and training hyper-parameters chosen by the grid
# search so the same network can be rebuilt with Keras below.
n0=X_train.shape[1]                 # input dimension (number of MFCC features)

### hidden_layer_sizes
# Hidden-layer widths from the best MLPClassifier (a tuple), copied into a
# list so we can append the final single-unit output layer.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)                        # one sigmoid output unit for binary classification

lr = grid.best_params_['learning_rate_init']   # initial learning rate
epochs = grid.best_params_['max_iter']         # epoch budget for model.fit
In [301]:
# Keras functional-API input layer matching the n0 feature columns.
input_tensor = Input(shape = (n0,))
In [302]:
# Stack one tanh Dense layer per width in ns[:-1] on top of the input,
# keeping every intermediate tensor, then finish with a single sigmoid
# unit for binary classification.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation = 'tanh')(hidden_outputs[-1]))

classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [303]:
# Build the model and snapshot its freshly-initialized weights so training
# can later be restarted from exactly the same starting point (see set_weights below).
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [304]:
# Architecture check: 13 -> 20 -> 20 -> 20 -> 1, 1,141 trainable parameters.
model.summary()
Model: "model_14"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_14 (InputLayer)        (None, 13)                0         
_________________________________________________________________
dense_45 (Dense)             (None, 20)                280       
_________________________________________________________________
dense_46 (Dense)             (None, 20)                420       
_________________________________________________________________
dense_47 (Dense)             (None, 20)                420       
_________________________________________________________________
dense_48 (Dense)             (None, 1)                 21        
=================================================================
Total params: 1,141
Trainable params: 1,141
Non-trainable params: 0
_________________________________________________________________
In [305]:
# Reset to the snapshotted initial weights so this cell is idempotent, then
# train with Adam at the grid-searched learning rate.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy kwarg name; newer Keras uses `learning_rate`.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy fails to improve by
# at least 0.01 for 10 consecutive epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test),
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 168 samples, validate on 57 samples
Epoch 1/100
168/168 [==============================] - 0s 1ms/step - loss: 0.6734 - accuracy: 0.5714 - val_loss: 0.6686 - val_accuracy: 0.5789
Epoch 2/100
168/168 [==============================] - 0s 107us/step - loss: 0.6107 - accuracy: 0.6667 - val_loss: 0.6513 - val_accuracy: 0.5614
Epoch 3/100
168/168 [==============================] - 0s 77us/step - loss: 0.5755 - accuracy: 0.7202 - val_loss: 0.6565 - val_accuracy: 0.5965
Epoch 4/100
168/168 [==============================] - 0s 71us/step - loss: 0.5608 - accuracy: 0.7262 - val_loss: 0.6731 - val_accuracy: 0.5965
Epoch 5/100
168/168 [==============================] - 0s 65us/step - loss: 0.5544 - accuracy: 0.7321 - val_loss: 0.6889 - val_accuracy: 0.5789
Epoch 6/100
168/168 [==============================] - 0s 71us/step - loss: 0.5457 - accuracy: 0.7321 - val_loss: 0.6953 - val_accuracy: 0.6316
Epoch 7/100
168/168 [==============================] - 0s 65us/step - loss: 0.5374 - accuracy: 0.7202 - val_loss: 0.6785 - val_accuracy: 0.6491
Epoch 8/100
168/168 [==============================] - 0s 71us/step - loss: 0.5297 - accuracy: 0.7321 - val_loss: 0.6659 - val_accuracy: 0.6316
Epoch 9/100
168/168 [==============================] - 0s 65us/step - loss: 0.5187 - accuracy: 0.7381 - val_loss: 0.6761 - val_accuracy: 0.5965
Epoch 10/100
168/168 [==============================] - 0s 71us/step - loss: 0.5126 - accuracy: 0.7381 - val_loss: 0.6996 - val_accuracy: 0.6140
Epoch 11/100
168/168 [==============================] - 0s 71us/step - loss: 0.5062 - accuracy: 0.7619 - val_loss: 0.7055 - val_accuracy: 0.5614
Epoch 12/100
168/168 [==============================] - 0s 71us/step - loss: 0.4976 - accuracy: 0.7738 - val_loss: 0.6888 - val_accuracy: 0.5614
Epoch 13/100
168/168 [==============================] - 0s 71us/step - loss: 0.4849 - accuracy: 0.7679 - val_loss: 0.6634 - val_accuracy: 0.6316
Epoch 14/100
168/168 [==============================] - 0s 71us/step - loss: 0.4724 - accuracy: 0.7738 - val_loss: 0.6561 - val_accuracy: 0.6316
Epoch 15/100
168/168 [==============================] - 0s 71us/step - loss: 0.4594 - accuracy: 0.7679 - val_loss: 0.6666 - val_accuracy: 0.6316
Epoch 16/100
168/168 [==============================] - 0s 71us/step - loss: 0.4471 - accuracy: 0.7917 - val_loss: 0.6749 - val_accuracy: 0.6140
Epoch 17/100
168/168 [==============================] - 0s 71us/step - loss: 0.4307 - accuracy: 0.8095 - val_loss: 0.6773 - val_accuracy: 0.6667
Epoch 18/100
168/168 [==============================] - 0s 71us/step - loss: 0.4218 - accuracy: 0.8155 - val_loss: 0.6815 - val_accuracy: 0.6491
Epoch 19/100
168/168 [==============================] - 0s 71us/step - loss: 0.4026 - accuracy: 0.8333 - val_loss: 0.6837 - val_accuracy: 0.6491
Epoch 20/100
168/168 [==============================] - 0s 107us/step - loss: 0.3847 - accuracy: 0.8512 - val_loss: 0.7176 - val_accuracy: 0.6316
Epoch 21/100
168/168 [==============================] - 0s 95us/step - loss: 0.3716 - accuracy: 0.8631 - val_loss: 0.7504 - val_accuracy: 0.5965
Epoch 22/100
168/168 [==============================] - 0s 95us/step - loss: 0.3500 - accuracy: 0.8571 - val_loss: 0.7848 - val_accuracy: 0.5965
Epoch 23/100
168/168 [==============================] - 0s 77us/step - loss: 0.3446 - accuracy: 0.8571 - val_loss: 0.7247 - val_accuracy: 0.6667
Epoch 24/100
168/168 [==============================] - 0s 71us/step - loss: 0.3145 - accuracy: 0.9167 - val_loss: 0.6438 - val_accuracy: 0.6667
Epoch 25/100
168/168 [==============================] - 0s 65us/step - loss: 0.2989 - accuracy: 0.8929 - val_loss: 0.6838 - val_accuracy: 0.6491
Epoch 26/100
168/168 [==============================] - 0s 71us/step - loss: 0.2818 - accuracy: 0.9048 - val_loss: 0.7048 - val_accuracy: 0.6140
Epoch 27/100
168/168 [==============================] - 0s 65us/step - loss: 0.2584 - accuracy: 0.9167 - val_loss: 0.6730 - val_accuracy: 0.6667

Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.0020000000949949026.
Epoch 28/100
168/168 [==============================] - 0s 65us/step - loss: 0.2363 - accuracy: 0.9167 - val_loss: 0.6651 - val_accuracy: 0.6667
Epoch 29/100
168/168 [==============================] - 0s 65us/step - loss: 0.2298 - accuracy: 0.9345 - val_loss: 0.6837 - val_accuracy: 0.6491
Epoch 30/100
168/168 [==============================] - 0s 65us/step - loss: 0.2195 - accuracy: 0.9464 - val_loss: 0.7148 - val_accuracy: 0.6491
Epoch 31/100
168/168 [==============================] - 0s 71us/step - loss: 0.2092 - accuracy: 0.9464 - val_loss: 0.7164 - val_accuracy: 0.6667
Epoch 32/100
168/168 [==============================] - 0s 59us/step - loss: 0.1981 - accuracy: 0.9583 - val_loss: 0.7310 - val_accuracy: 0.6316
Epoch 33/100
168/168 [==============================] - 0s 71us/step - loss: 0.1945 - accuracy: 0.9524 - val_loss: 0.7229 - val_accuracy: 0.6491
Epoch 34/100
168/168 [==============================] - 0s 65us/step - loss: 0.1883 - accuracy: 0.9583 - val_loss: 0.6987 - val_accuracy: 0.6491
Epoch 35/100
168/168 [==============================] - 0s 101us/step - loss: 0.1769 - accuracy: 0.9583 - val_loss: 0.6846 - val_accuracy: 0.6842
Epoch 36/100
168/168 [==============================] - 0s 77us/step - loss: 0.1693 - accuracy: 0.9643 - val_loss: 0.6990 - val_accuracy: 0.6842
Epoch 37/100
168/168 [==============================] - 0s 77us/step - loss: 0.1620 - accuracy: 0.9643 - val_loss: 0.7038 - val_accuracy: 0.6667
Epoch 38/100
168/168 [==============================] - 0s 77us/step - loss: 0.1498 - accuracy: 0.9702 - val_loss: 0.7501 - val_accuracy: 0.6667
Epoch 39/100
168/168 [==============================] - 0s 95us/step - loss: 0.1469 - accuracy: 0.9702 - val_loss: 0.7930 - val_accuracy: 0.6667
Epoch 40/100
168/168 [==============================] - 0s 71us/step - loss: 0.1415 - accuracy: 0.9702 - val_loss: 0.7957 - val_accuracy: 0.6667
Epoch 41/100
168/168 [==============================] - 0s 65us/step - loss: 0.1302 - accuracy: 0.9702 - val_loss: 0.7824 - val_accuracy: 0.6842
Epoch 42/100
168/168 [==============================] - 0s 71us/step - loss: 0.1227 - accuracy: 0.9821 - val_loss: 0.7895 - val_accuracy: 0.6842
Epoch 43/100
168/168 [==============================] - 0s 77us/step - loss: 0.1172 - accuracy: 0.9881 - val_loss: 0.7901 - val_accuracy: 0.6667
Epoch 44/100
168/168 [==============================] - 0s 71us/step - loss: 0.1115 - accuracy: 0.9881 - val_loss: 0.8162 - val_accuracy: 0.6667
Epoch 45/100
168/168 [==============================] - 0s 71us/step - loss: 0.1054 - accuracy: 0.9821 - val_loss: 0.8401 - val_accuracy: 0.6842

Epoch 00045: ReduceLROnPlateau reducing learning rate to 0.0010000000474974513.
Epoch 46/100
168/168 [==============================] - 0s 65us/step - loss: 0.0997 - accuracy: 0.9881 - val_loss: 0.8561 - val_accuracy: 0.6667
Epoch 47/100
168/168 [==============================] - 0s 71us/step - loss: 0.0961 - accuracy: 0.9881 - val_loss: 0.8576 - val_accuracy: 0.6667
Epoch 48/100
168/168 [==============================] - 0s 71us/step - loss: 0.0936 - accuracy: 0.9881 - val_loss: 0.8643 - val_accuracy: 0.6667
Epoch 49/100
168/168 [==============================] - 0s 71us/step - loss: 0.0908 - accuracy: 0.9940 - val_loss: 0.8631 - val_accuracy: 0.6491
Epoch 50/100
168/168 [==============================] - 0s 71us/step - loss: 0.0884 - accuracy: 0.9940 - val_loss: 0.8676 - val_accuracy: 0.6491
Epoch 51/100
168/168 [==============================] - 0s 71us/step - loss: 0.0856 - accuracy: 0.9940 - val_loss: 0.8607 - val_accuracy: 0.6491
Epoch 52/100
168/168 [==============================] - 0s 77us/step - loss: 0.0838 - accuracy: 0.9881 - val_loss: 0.8592 - val_accuracy: 0.6491
Epoch 53/100
168/168 [==============================] - 0s 71us/step - loss: 0.0819 - accuracy: 0.9881 - val_loss: 0.8771 - val_accuracy: 0.6667
Epoch 54/100
168/168 [==============================] - 0s 71us/step - loss: 0.0794 - accuracy: 0.9940 - val_loss: 0.8912 - val_accuracy: 0.6667
Epoch 55/100
168/168 [==============================] - 0s 65us/step - loss: 0.0777 - accuracy: 0.9940 - val_loss: 0.9022 - val_accuracy: 0.6667

Epoch 00055: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 56/100
168/168 [==============================] - 0s 65us/step - loss: 0.0740 - accuracy: 1.0000 - val_loss: 0.8995 - val_accuracy: 0.6667
Epoch 57/100
168/168 [==============================] - 0s 71us/step - loss: 0.0733 - accuracy: 1.0000 - val_loss: 0.8922 - val_accuracy: 0.7018
Epoch 58/100
168/168 [==============================] - 0s 65us/step - loss: 0.0719 - accuracy: 1.0000 - val_loss: 0.8924 - val_accuracy: 0.6842
Epoch 59/100
168/168 [==============================] - 0s 65us/step - loss: 0.0707 - accuracy: 1.0000 - val_loss: 0.8931 - val_accuracy: 0.6842
Epoch 60/100
168/168 [==============================] - 0s 65us/step - loss: 0.0695 - accuracy: 1.0000 - val_loss: 0.8913 - val_accuracy: 0.6842
Epoch 61/100
168/168 [==============================] - 0s 77us/step - loss: 0.0685 - accuracy: 1.0000 - val_loss: 0.8970 - val_accuracy: 0.6842
Epoch 62/100
168/168 [==============================] - 0s 71us/step - loss: 0.0674 - accuracy: 1.0000 - val_loss: 0.8957 - val_accuracy: 0.7018
Epoch 63/100
168/168 [==============================] - 0s 71us/step - loss: 0.0663 - accuracy: 1.0000 - val_loss: 0.9018 - val_accuracy: 0.7018
Epoch 64/100
168/168 [==============================] - 0s 65us/step - loss: 0.0653 - accuracy: 1.0000 - val_loss: 0.9069 - val_accuracy: 0.6667
Epoch 65/100
168/168 [==============================] - 0s 83us/step - loss: 0.0646 - accuracy: 1.0000 - val_loss: 0.9088 - val_accuracy: 0.6842
Epoch 66/100
168/168 [==============================] - 0s 119us/step - loss: 0.0636 - accuracy: 1.0000 - val_loss: 0.9138 - val_accuracy: 0.6842
Epoch 67/100
168/168 [==============================] - 0s 101us/step - loss: 0.0626 - accuracy: 1.0000 - val_loss: 0.9116 - val_accuracy: 0.6842

Epoch 00067: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 68/100
168/168 [==============================] - 0s 65us/step - loss: 0.0616 - accuracy: 1.0000 - val_loss: 0.9120 - val_accuracy: 0.6842
Epoch 69/100
168/168 [==============================] - 0s 71us/step - loss: 0.0609 - accuracy: 1.0000 - val_loss: 0.9102 - val_accuracy: 0.6842
Epoch 70/100
168/168 [==============================] - 0s 71us/step - loss: 0.0605 - accuracy: 1.0000 - val_loss: 0.9081 - val_accuracy: 0.7018
Epoch 71/100
168/168 [==============================] - 0s 71us/step - loss: 0.0599 - accuracy: 1.0000 - val_loss: 0.9080 - val_accuracy: 0.7018
Epoch 72/100
168/168 [==============================] - 0s 65us/step - loss: 0.0596 - accuracy: 1.0000 - val_loss: 0.9082 - val_accuracy: 0.7018
Epoch 73/100
168/168 [==============================] - 0s 77us/step - loss: 0.0590 - accuracy: 1.0000 - val_loss: 0.9162 - val_accuracy: 0.7018
Epoch 74/100
168/168 [==============================] - 0s 71us/step - loss: 0.0586 - accuracy: 1.0000 - val_loss: 0.9215 - val_accuracy: 0.7018
Epoch 75/100
168/168 [==============================] - 0s 71us/step - loss: 0.0582 - accuracy: 1.0000 - val_loss: 0.9252 - val_accuracy: 0.7018
Epoch 76/100
168/168 [==============================] - 0s 71us/step - loss: 0.0577 - accuracy: 1.0000 - val_loss: 0.9273 - val_accuracy: 0.7018
Epoch 77/100
168/168 [==============================] - 0s 71us/step - loss: 0.0572 - accuracy: 1.0000 - val_loss: 0.9286 - val_accuracy: 0.7018

Epoch 00077: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 78/100
168/168 [==============================] - 0s 65us/step - loss: 0.0567 - accuracy: 1.0000 - val_loss: 0.9295 - val_accuracy: 0.7018
Epoch 79/100
168/168 [==============================] - 0s 77us/step - loss: 0.0566 - accuracy: 1.0000 - val_loss: 0.9291 - val_accuracy: 0.6842
Epoch 80/100
168/168 [==============================] - 0s 71us/step - loss: 0.0564 - accuracy: 1.0000 - val_loss: 0.9298 - val_accuracy: 0.6842
Epoch 81/100
168/168 [==============================] - 0s 71us/step - loss: 0.0561 - accuracy: 1.0000 - val_loss: 0.9305 - val_accuracy: 0.6842
Epoch 82/100
168/168 [==============================] - 0s 77us/step - loss: 0.0559 - accuracy: 1.0000 - val_loss: 0.9319 - val_accuracy: 0.6842
Epoch 83/100
168/168 [==============================] - 0s 71us/step - loss: 0.0557 - accuracy: 1.0000 - val_loss: 0.9349 - val_accuracy: 0.6842
Epoch 84/100
168/168 [==============================] - 0s 65us/step - loss: 0.0554 - accuracy: 1.0000 - val_loss: 0.9362 - val_accuracy: 0.6842
Epoch 85/100
168/168 [==============================] - 0s 65us/step - loss: 0.0553 - accuracy: 1.0000 - val_loss: 0.9369 - val_accuracy: 0.6842
Epoch 86/100
168/168 [==============================] - 0s 65us/step - loss: 0.0551 - accuracy: 1.0000 - val_loss: 0.9363 - val_accuracy: 0.6842
Epoch 87/100
168/168 [==============================] - 0s 65us/step - loss: 0.0549 - accuracy: 1.0000 - val_loss: 0.9361 - val_accuracy: 0.6842

Epoch 00087: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 88/100
168/168 [==============================] - 0s 65us/step - loss: 0.0546 - accuracy: 1.0000 - val_loss: 0.9357 - val_accuracy: 0.6842
Epoch 89/100
168/168 [==============================] - 0s 65us/step - loss: 0.0545 - accuracy: 1.0000 - val_loss: 0.9352 - val_accuracy: 0.6842
Epoch 90/100
168/168 [==============================] - 0s 65us/step - loss: 0.0544 - accuracy: 1.0000 - val_loss: 0.9354 - val_accuracy: 0.6842
Epoch 91/100
168/168 [==============================] - 0s 65us/step - loss: 0.0543 - accuracy: 1.0000 - val_loss: 0.9351 - val_accuracy: 0.6842
Epoch 92/100
168/168 [==============================] - 0s 77us/step - loss: 0.0542 - accuracy: 1.0000 - val_loss: 0.9347 - val_accuracy: 0.6842
Epoch 93/100
168/168 [==============================] - 0s 113us/step - loss: 0.0540 - accuracy: 1.0000 - val_loss: 0.9349 - val_accuracy: 0.6842
Epoch 94/100
168/168 [==============================] - 0s 107us/step - loss: 0.0540 - accuracy: 1.0000 - val_loss: 0.9350 - val_accuracy: 0.7018
Epoch 95/100
168/168 [==============================] - 0s 101us/step - loss: 0.0538 - accuracy: 1.0000 - val_loss: 0.9367 - val_accuracy: 0.7018
Epoch 96/100
168/168 [==============================] - 0s 89us/step - loss: 0.0537 - accuracy: 1.0000 - val_loss: 0.9379 - val_accuracy: 0.7018
Epoch 97/100
168/168 [==============================] - 0s 71us/step - loss: 0.0536 - accuracy: 1.0000 - val_loss: 0.9380 - val_accuracy: 0.7018

Epoch 00097: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 98/100
168/168 [==============================] - 0s 71us/step - loss: 0.0535 - accuracy: 1.0000 - val_loss: 0.9380 - val_accuracy: 0.7018
Epoch 99/100
168/168 [==============================] - 0s 71us/step - loss: 0.0534 - accuracy: 1.0000 - val_loss: 0.9380 - val_accuracy: 0.7018
Epoch 100/100
168/168 [==============================] - 0s 77us/step - loss: 0.0533 - accuracy: 1.0000 - val_loss: 0.9381 - val_accuracy: 0.7018
In [306]:
# Learning curves: training vs. validation accuracy and loss per epoch.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# NOTE(review): this rebinds `epochs` (previously the int epoch budget used
# by model.fit) to a range — re-running the training cell after this one
# would fail; consider a distinct name such as `epoch_range`.
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
range(0, 100)
In [307]:
# Final held-out evaluation (loss is binary cross-entropy).
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
57/57 [==============================] - 0s 53us/step
test loss: 0.9380739571755392, test accuracy: 0.7017543911933899
In [308]:
# ROC AUC is computed on the raw sigmoid scores — no thresholding needed.
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
AUC ROC:  0.7188365650969529
In [309]:
# Threshold the sigmoid scores at 0.5 to obtain hard 0/1 predictions, then
# report Cohen's kappa against the true test labels.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.37037037037037035

KMeans

In [177]:
# Display the feature matrix used for this KMeans section (13 standardized
# MFCC columns — see the table below).
X
Out[177]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13
0 -1.430409 -0.286045 0.490919 0.872836 -0.908379 -0.032724 -0.630149 -0.077256 -0.666116 -0.409507 -0.808189 -0.316827 1.001635
1 -0.282157 -1.478798 -1.125065 1.134727 0.294401 -1.552108 -2.048764 -0.332843 -0.298952 1.114161 0.274322 1.796602 2.309045
2 -0.419749 0.440468 0.853257 0.239250 -0.398831 -0.780274 -0.360447 -0.316940 -0.976474 0.879549 -0.457280 0.640345 0.645756
3 -0.431942 -1.517593 0.673149 0.786628 -1.306695 1.366669 1.142564 -2.034919 -0.374780 0.475572 -0.952521 0.198178 0.640593
4 0.011173 0.536562 -0.966199 -0.482951 0.331291 0.606478 0.539582 -0.270964 0.042364 -0.165555 0.123590 0.101357 -0.658096
5 0.126251 -0.590904 -1.517639 -1.072279 0.624234 1.023491 0.771080 0.598433 -0.513511 -0.860661 0.652573 1.694095 0.860230
6 0.830490 -0.982750 2.985373 1.132038 0.506664 3.816969 -0.181042 -0.077794 -0.404476 -0.632689 -0.730525 -0.299915 -0.619079
7 -0.461917 0.208363 0.278132 2.301636 1.667266 0.690336 1.117026 1.428294 1.041434 0.404890 -1.225161 -1.083934 0.065409
8 0.249199 -0.247143 -0.916299 0.679460 -0.342859 0.174455 0.125219 0.900110 0.761341 -0.398986 -1.063148 0.855251 2.099798
9 0.311144 1.364526 -0.004701 0.405862 -0.939024 0.796235 -0.589807 -0.084900 0.092344 -0.628186 -0.601718 -0.966679 0.086483
10 1.334587 0.242352 0.597807 1.994512 -0.050194 0.166573 -0.234755 0.602868 -1.535513 -3.732625 -0.901353 0.263896 -1.060994
11 1.318614 -0.797973 0.011175 -0.735851 -0.310309 -1.472949 -0.886459 0.062987 -0.118121 0.096635 0.405075 -1.082018 0.068160
12 0.824621 1.143049 0.195198 -0.036161 0.107483 -0.112574 -0.229915 -0.249936 0.727625 -0.048902 0.611366 0.128240 0.758492
13 -0.568472 -0.848858 0.090767 1.154534 -0.215999 -2.659688 -1.881352 0.412350 -0.790718 0.827656 0.539638 1.593572 2.002635
14 0.406968 1.201176 1.075155 -0.124700 0.135767 -0.178145 -0.204776 -1.618374 0.193121 0.693318 1.055785 -0.160648 0.537962
15 -0.287723 1.515404 0.539186 0.427811 -0.006824 0.845973 0.412855 -0.313048 0.703395 0.284501 -0.348641 -0.519018 -0.664237
16 0.522131 0.617054 0.277861 -0.836147 -1.629099 -0.005864 0.191114 0.043306 0.248219 0.814197 -0.101490 -0.978501 -0.398761
17 -0.896889 -1.308091 -0.693222 0.501341 -0.162478 -0.037095 -0.270506 0.346239 0.691093 -0.015841 -0.871109 -1.169731 0.270587
18 0.875184 0.085665 0.080385 0.242890 -0.938716 -1.140718 -0.097002 -0.428159 -0.490553 -0.095430 -0.233748 -0.812311 0.895950
19 -0.659140 -0.549349 -0.129098 1.107800 -3.028026 -0.963090 0.170241 -2.196582 -0.518279 1.312693 -1.543068 -0.398880 0.589793
20 -0.563764 -2.009854 0.074903 -0.464555 -1.109839 0.088831 -0.566914 0.384785 -0.625854 -0.723645 -1.000855 0.809858 0.131808
21 1.250269 0.010904 0.201698 -0.957619 -0.013951 1.101073 -0.075927 -0.502371 -1.866004 0.114645 0.156305 0.629372 1.366339
22 -1.190023 -0.268956 -0.555588 -0.621566 -0.230031 0.264870 -0.241968 0.258810 0.329697 0.359684 0.486221 0.532413 0.397267
23 -0.802365 0.152404 -0.567015 0.381822 0.592009 -0.562610 -1.563042 0.721323 2.941459 2.101624 0.519023 -0.366100 -0.253206
24 -0.893865 0.341962 -0.830315 0.329795 0.563212 -0.327526 -1.488529 0.513889 2.829533 2.088829 0.636581 -0.214855 -0.258980
25 -0.121562 1.347439 0.744281 0.698770 -0.746534 -0.657656 -0.027263 -0.447918 0.338675 1.028922 0.807479 0.808201 -0.400533
26 -1.615036 1.323285 0.625582 0.721012 -0.691466 -0.853918 -0.638037 -0.521595 0.602985 0.889454 -0.304429 -1.597203 -0.132420
27 -0.522333 1.534988 -0.017521 -0.056191 0.437400 1.304359 0.421225 -0.229724 0.856944 0.363658 -0.300702 0.199378 -0.091915
28 0.020009 -0.529617 -0.687843 -1.068170 -0.317981 -0.873679 2.545261 0.319549 0.389927 -1.527578 -0.515574 -1.761353 0.997758
29 0.294921 -0.175540 -2.170589 0.206338 0.608432 -2.666144 -0.240215 1.450295 1.100392 -1.316262 0.000504 -3.910703 -0.679615
... ... ... ... ... ... ... ... ... ... ... ... ... ...
195 -0.713121 -0.124100 0.184524 -0.289193 1.143155 -0.040337 -0.456140 0.386906 -0.409479 -0.013040 -0.448614 0.678472 0.856538
196 0.088904 -0.480222 0.314568 0.011773 -0.871197 -0.254444 -1.214378 0.074859 0.805154 0.107568 -0.101697 0.725634 -0.674412
197 -0.819978 -0.214030 0.603520 -0.224844 0.008606 0.221715 0.248298 0.580332 0.548519 1.036227 0.084133 -0.973833 -1.118049
198 0.089868 -0.642488 -0.163873 0.162352 -2.220413 -1.138568 -0.479671 2.364939 0.560742 -0.314032 1.123286 0.586946 1.028865
199 0.490718 -0.570100 -0.601226 -1.373455 -2.219961 -1.917807 -1.874507 2.571947 0.227763 -1.470442 0.797456 0.390489 0.443397
200 -0.725311 0.995058 1.698440 -0.409174 1.345314 -0.371509 0.853418 0.950027 0.524004 -0.776995 0.384800 0.564620 0.242242
201 -0.088956 0.381477 0.665527 0.871194 0.436489 0.159443 0.297415 0.889031 -0.725496 -0.079337 0.812380 0.888138 0.315317
202 -0.627833 0.625737 0.574533 0.351258 0.679287 0.045521 -0.005976 -0.356564 -0.077959 -0.043215 1.410034 0.680198 0.118446
203 -0.591412 -1.691897 -1.280443 0.004164 0.029231 1.050215 1.339125 0.772476 -0.548882 -0.875977 -0.739724 -0.002660 0.550573
204 -0.213359 -0.655305 -0.545729 -0.525488 -0.788705 -0.715509 0.006099 0.426857 0.380788 0.708127 1.161262 0.582157 0.085247
205 0.561065 -0.270819 -0.376934 1.362033 -0.711445 1.354311 -0.206743 -0.142823 1.553667 -0.536125 -1.684840 0.688414 -1.009051
206 0.905511 0.711971 1.427482 0.086910 -0.261378 0.561744 0.222402 -1.622424 0.024138 0.934010 -0.468465 0.170884 0.221140
207 0.067531 0.386370 -0.307384 -0.040460 -0.049760 -0.032394 0.431908 1.042755 -1.011654 -0.412244 -0.108096 0.184730 -0.138904
208 -0.059758 0.045608 -0.194712 -0.038341 -0.248229 0.218675 -0.402325 0.773505 0.041528 0.766170 -0.042900 0.536066 0.856860
209 0.486749 -0.756717 -0.874808 -0.729025 -1.528664 0.066851 -0.217409 2.535472 0.810467 2.007519 -1.633543 1.078687 -1.214695
210 -0.009918 0.023157 -0.108599 -0.353482 -0.525150 0.026241 -0.209046 0.952549 0.118271 0.761473 -0.021901 0.723007 1.176221
211 -0.852239 -0.126721 1.768756 -0.139569 1.457419 -1.896514 0.722738 -1.858343 1.023542 0.337423 -1.696471 0.797698 -1.801833
212 -1.605282 0.546705 -0.027523 -0.007901 0.390982 0.752113 0.108134 -0.532402 -0.658558 -0.655673 -0.110552 -0.038507 0.564082
213 -1.537486 0.438542 -0.054954 -0.009054 0.565426 0.944990 0.000999 -0.699569 -0.616522 -0.546167 0.075944 0.000029 0.772172
214 0.478176 -0.623588 -1.163628 -0.024044 -0.377051 0.114672 -1.189664 -0.599743 0.064422 -0.284247 0.793914 0.752339 -0.558744
215 -0.653553 -0.272142 0.596156 0.881373 -2.295187 0.283720 -0.193981 -0.067370 0.777762 -0.959991 -0.275185 0.990175 -2.038870
216 -0.746791 -0.229040 0.929885 0.869993 -2.913181 0.212781 -0.305148 -0.195613 1.117807 -0.935236 -0.012361 1.696083 -2.096406
217 -0.402132 0.567649 0.658617 1.252447 0.282722 -0.867962 -0.658417 -0.711252 0.412558 0.018922 -0.656841 -0.668848 -0.606249
218 0.253416 2.028743 -0.249389 -0.135717 -0.432099 -1.127803 0.330577 -0.150249 1.400036 -0.237307 -1.036935 0.796314 0.315125
219 -0.268737 0.304053 0.442309 1.466913 -0.190859 -1.339833 -0.412332 -0.934782 -0.340795 -0.803146 -0.913412 -1.601519 -0.895268
220 0.164485 1.445490 2.674724 -0.663649 -0.232015 1.428702 0.961717 -2.600552 -0.356496 0.842619 2.796380 0.923058 1.962832
221 0.702551 0.697481 0.141117 -0.647568 0.265119 0.543185 0.967290 -0.204736 -0.388782 -1.297479 1.446403 1.404421 0.191658
222 0.727431 0.078441 -0.027658 -0.293281 -0.116893 0.762821 -0.474665 -0.142512 -0.107789 0.767739 -0.106817 -0.016677 0.681705
223 -1.601068 0.456897 -0.776221 -0.202831 0.972321 1.554034 1.293388 0.533103 -0.658778 -0.011963 -0.965611 1.297730 2.334936
224 -0.775289 -1.780714 -0.773207 -0.130797 -0.258296 0.465109 0.964189 -0.054493 -0.318554 -0.116591 0.490944 0.263715 0.162778

225 rows × 13 columns

In [178]:
# Elbow method: within-cluster sum of squares (KMeans inertia) for
# K = 1..14, all with the same fixed random_state for reproducibility.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[178]:
[2925.0,
 2654.4694551642833,
 2487.537140339455,
 2347.3385606542174,
 2241.219607532395,
 2187.984558812649,
 2078.2651666536785,
 1992.3748205985125,
 1984.6213758642439,
 1904.4598519281963,
 1850.0132981301954,
 1816.2106987200941,
 1761.4652710138919,
 1722.4326028221699]
In [179]:
# Elbow plot: look for the K where the inertia curve starts to flatten.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
Out[179]:
[<matplotlib.lines.Line2D at 0x1e82f5d1048>]

K = 2 — chosen from the elbow plot above, where the inertia curve begins to flatten

In [180]:
# Final clustering with K = 2 (per the elbow analysis above); fixed
# random_state makes the cluster assignment reproducible.
kmeans_mfcc = KMeans(n_clusters=2, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
Out[180]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [181]:
# Cluster assignment (0/1) for each of the fitted songs.
kmeans_mfcc.labels_
Out[181]:
array([1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1,
       0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0,
       0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1,
       0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0,
       1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1,
       0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1,
       0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1,
       1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,
       0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0,
       0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1,
       1, 1, 1, 1, 0])
In [182]:
# For the same data the model was fitted on, predict() returns the same
# assignments as kmeans_mfcc.labels_ above (compare Out[181]/Out[182]).
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
Out[182]:
array([1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1,
       0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0,
       0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1,
       0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0,
       1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1,
       0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1,
       0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1,
       1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,
       0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0,
       0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1,
       1, 1, 1, 1, 0])
In [183]:
# Attach the cluster assignment and the true label to X for comparison.
# NOTE(review): this mutates X in place — any later cell that re-uses X
# as a pure feature matrix will now also see these two extra columns.
X.loc[:,'Cluster'] = clusters_mfcc
X.loc[:,'chosen'] = list(y)
In [184]:
# Show the feature matrix with the new 'Cluster' and 'chosen' columns.
X
Out[184]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13 Cluster chosen
0 -1.430409 -0.286045 0.490919 0.872836 -0.908379 -0.032724 -0.630149 -0.077256 -0.666116 -0.409507 -0.808189 -0.316827 1.001635 1 0
1 -0.282157 -1.478798 -1.125065 1.134727 0.294401 -1.552108 -2.048764 -0.332843 -0.298952 1.114161 0.274322 1.796602 2.309045 1 0
2 -0.419749 0.440468 0.853257 0.239250 -0.398831 -0.780274 -0.360447 -0.316940 -0.976474 0.879549 -0.457280 0.640345 0.645756 1 0
3 -0.431942 -1.517593 0.673149 0.786628 -1.306695 1.366669 1.142564 -2.034919 -0.374780 0.475572 -0.952521 0.198178 0.640593 1 0
4 0.011173 0.536562 -0.966199 -0.482951 0.331291 0.606478 0.539582 -0.270964 0.042364 -0.165555 0.123590 0.101357 -0.658096 0 0
5 0.126251 -0.590904 -1.517639 -1.072279 0.624234 1.023491 0.771080 0.598433 -0.513511 -0.860661 0.652573 1.694095 0.860230 0 0
6 0.830490 -0.982750 2.985373 1.132038 0.506664 3.816969 -0.181042 -0.077794 -0.404476 -0.632689 -0.730525 -0.299915 -0.619079 1 0
7 -0.461917 0.208363 0.278132 2.301636 1.667266 0.690336 1.117026 1.428294 1.041434 0.404890 -1.225161 -1.083934 0.065409 1 0
8 0.249199 -0.247143 -0.916299 0.679460 -0.342859 0.174455 0.125219 0.900110 0.761341 -0.398986 -1.063148 0.855251 2.099798 1 0
9 0.311144 1.364526 -0.004701 0.405862 -0.939024 0.796235 -0.589807 -0.084900 0.092344 -0.628186 -0.601718 -0.966679 0.086483 1 0
10 1.334587 0.242352 0.597807 1.994512 -0.050194 0.166573 -0.234755 0.602868 -1.535513 -3.732625 -0.901353 0.263896 -1.060994 1 0
11 1.318614 -0.797973 0.011175 -0.735851 -0.310309 -1.472949 -0.886459 0.062987 -0.118121 0.096635 0.405075 -1.082018 0.068160 0 0
12 0.824621 1.143049 0.195198 -0.036161 0.107483 -0.112574 -0.229915 -0.249936 0.727625 -0.048902 0.611366 0.128240 0.758492 1 0
13 -0.568472 -0.848858 0.090767 1.154534 -0.215999 -2.659688 -1.881352 0.412350 -0.790718 0.827656 0.539638 1.593572 2.002635 1 0
14 0.406968 1.201176 1.075155 -0.124700 0.135767 -0.178145 -0.204776 -1.618374 0.193121 0.693318 1.055785 -0.160648 0.537962 1 0
15 -0.287723 1.515404 0.539186 0.427811 -0.006824 0.845973 0.412855 -0.313048 0.703395 0.284501 -0.348641 -0.519018 -0.664237 1 0
16 0.522131 0.617054 0.277861 -0.836147 -1.629099 -0.005864 0.191114 0.043306 0.248219 0.814197 -0.101490 -0.978501 -0.398761 0 0
17 -0.896889 -1.308091 -0.693222 0.501341 -0.162478 -0.037095 -0.270506 0.346239 0.691093 -0.015841 -0.871109 -1.169731 0.270587 0 0
18 0.875184 0.085665 0.080385 0.242890 -0.938716 -1.140718 -0.097002 -0.428159 -0.490553 -0.095430 -0.233748 -0.812311 0.895950 1 0
19 -0.659140 -0.549349 -0.129098 1.107800 -3.028026 -0.963090 0.170241 -2.196582 -0.518279 1.312693 -1.543068 -0.398880 0.589793 1 0
20 -0.563764 -2.009854 0.074903 -0.464555 -1.109839 0.088831 -0.566914 0.384785 -0.625854 -0.723645 -1.000855 0.809858 0.131808 0 0
21 1.250269 0.010904 0.201698 -0.957619 -0.013951 1.101073 -0.075927 -0.502371 -1.866004 0.114645 0.156305 0.629372 1.366339 1 0
22 -1.190023 -0.268956 -0.555588 -0.621566 -0.230031 0.264870 -0.241968 0.258810 0.329697 0.359684 0.486221 0.532413 0.397267 0 0
23 -0.802365 0.152404 -0.567015 0.381822 0.592009 -0.562610 -1.563042 0.721323 2.941459 2.101624 0.519023 -0.366100 -0.253206 0 0
24 -0.893865 0.341962 -0.830315 0.329795 0.563212 -0.327526 -1.488529 0.513889 2.829533 2.088829 0.636581 -0.214855 -0.258980 0 0
25 -0.121562 1.347439 0.744281 0.698770 -0.746534 -0.657656 -0.027263 -0.447918 0.338675 1.028922 0.807479 0.808201 -0.400533 1 0
26 -1.615036 1.323285 0.625582 0.721012 -0.691466 -0.853918 -0.638037 -0.521595 0.602985 0.889454 -0.304429 -1.597203 -0.132420 1 0
27 -0.522333 1.534988 -0.017521 -0.056191 0.437400 1.304359 0.421225 -0.229724 0.856944 0.363658 -0.300702 0.199378 -0.091915 1 0
28 0.020009 -0.529617 -0.687843 -1.068170 -0.317981 -0.873679 2.545261 0.319549 0.389927 -1.527578 -0.515574 -1.761353 0.997758 0 0
29 0.294921 -0.175540 -2.170589 0.206338 0.608432 -2.666144 -0.240215 1.450295 1.100392 -1.316262 0.000504 -3.910703 -0.679615 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
195 -0.713121 -0.124100 0.184524 -0.289193 1.143155 -0.040337 -0.456140 0.386906 -0.409479 -0.013040 -0.448614 0.678472 0.856538 1 1
196 0.088904 -0.480222 0.314568 0.011773 -0.871197 -0.254444 -1.214378 0.074859 0.805154 0.107568 -0.101697 0.725634 -0.674412 0 1
197 -0.819978 -0.214030 0.603520 -0.224844 0.008606 0.221715 0.248298 0.580332 0.548519 1.036227 0.084133 -0.973833 -1.118049 0 1
198 0.089868 -0.642488 -0.163873 0.162352 -2.220413 -1.138568 -0.479671 2.364939 0.560742 -0.314032 1.123286 0.586946 1.028865 0 1
199 0.490718 -0.570100 -0.601226 -1.373455 -2.219961 -1.917807 -1.874507 2.571947 0.227763 -1.470442 0.797456 0.390489 0.443397 0 1
200 -0.725311 0.995058 1.698440 -0.409174 1.345314 -0.371509 0.853418 0.950027 0.524004 -0.776995 0.384800 0.564620 0.242242 1 1
201 -0.088956 0.381477 0.665527 0.871194 0.436489 0.159443 0.297415 0.889031 -0.725496 -0.079337 0.812380 0.888138 0.315317 1 1
202 -0.627833 0.625737 0.574533 0.351258 0.679287 0.045521 -0.005976 -0.356564 -0.077959 -0.043215 1.410034 0.680198 0.118446 1 1
203 -0.591412 -1.691897 -1.280443 0.004164 0.029231 1.050215 1.339125 0.772476 -0.548882 -0.875977 -0.739724 -0.002660 0.550573 0 1
204 -0.213359 -0.655305 -0.545729 -0.525488 -0.788705 -0.715509 0.006099 0.426857 0.380788 0.708127 1.161262 0.582157 0.085247 0 1
205 0.561065 -0.270819 -0.376934 1.362033 -0.711445 1.354311 -0.206743 -0.142823 1.553667 -0.536125 -1.684840 0.688414 -1.009051 0 1
206 0.905511 0.711971 1.427482 0.086910 -0.261378 0.561744 0.222402 -1.622424 0.024138 0.934010 -0.468465 0.170884 0.221140 1 1
207 0.067531 0.386370 -0.307384 -0.040460 -0.049760 -0.032394 0.431908 1.042755 -1.011654 -0.412244 -0.108096 0.184730 -0.138904 0 1
208 -0.059758 0.045608 -0.194712 -0.038341 -0.248229 0.218675 -0.402325 0.773505 0.041528 0.766170 -0.042900 0.536066 0.856860 1 1
209 0.486749 -0.756717 -0.874808 -0.729025 -1.528664 0.066851 -0.217409 2.535472 0.810467 2.007519 -1.633543 1.078687 -1.214695 0 1
210 -0.009918 0.023157 -0.108599 -0.353482 -0.525150 0.026241 -0.209046 0.952549 0.118271 0.761473 -0.021901 0.723007 1.176221 1 1
211 -0.852239 -0.126721 1.768756 -0.139569 1.457419 -1.896514 0.722738 -1.858343 1.023542 0.337423 -1.696471 0.797698 -1.801833 1 1
212 -1.605282 0.546705 -0.027523 -0.007901 0.390982 0.752113 0.108134 -0.532402 -0.658558 -0.655673 -0.110552 -0.038507 0.564082 1 1
213 -1.537486 0.438542 -0.054954 -0.009054 0.565426 0.944990 0.000999 -0.699569 -0.616522 -0.546167 0.075944 0.000029 0.772172 1 1
214 0.478176 -0.623588 -1.163628 -0.024044 -0.377051 0.114672 -1.189664 -0.599743 0.064422 -0.284247 0.793914 0.752339 -0.558744 0 1
215 -0.653553 -0.272142 0.596156 0.881373 -2.295187 0.283720 -0.193981 -0.067370 0.777762 -0.959991 -0.275185 0.990175 -2.038870 0 1
216 -0.746791 -0.229040 0.929885 0.869993 -2.913181 0.212781 -0.305148 -0.195613 1.117807 -0.935236 -0.012361 1.696083 -2.096406 0 1
217 -0.402132 0.567649 0.658617 1.252447 0.282722 -0.867962 -0.658417 -0.711252 0.412558 0.018922 -0.656841 -0.668848 -0.606249 1 1
218 0.253416 2.028743 -0.249389 -0.135717 -0.432099 -1.127803 0.330577 -0.150249 1.400036 -0.237307 -1.036935 0.796314 0.315125 1 1
219 -0.268737 0.304053 0.442309 1.466913 -0.190859 -1.339833 -0.412332 -0.934782 -0.340795 -0.803146 -0.913412 -1.601519 -0.895268 1 1
220 0.164485 1.445490 2.674724 -0.663649 -0.232015 1.428702 0.961717 -2.600552 -0.356496 0.842619 2.796380 0.923058 1.962832 1 1
221 0.702551 0.697481 0.141117 -0.647568 0.265119 0.543185 0.967290 -0.204736 -0.388782 -1.297479 1.446403 1.404421 0.191658 1 1
222 0.727431 0.078441 -0.027658 -0.293281 -0.116893 0.762821 -0.474665 -0.142512 -0.107789 0.767739 -0.106817 -0.016677 0.681705 1 1
223 -1.601068 0.456897 -0.776221 -0.202831 0.972321 1.554034 1.293388 0.533103 -0.658778 -0.011963 -0.965611 1.297730 2.334936 1 1
224 -0.775289 -1.780714 -0.773207 -0.130797 -0.258296 0.465109 0.964189 -0.054493 -0.318554 -0.116591 0.490944 0.263715 0.162778 0 1

225 rows × 15 columns

In [185]:
# Count songs per (chosen, Cluster) pair and draw a stacked bar chart
# (one bar per cluster, split by the 'chosen' label) to see whether the
# unsupervised clusters line up with the playlist-selection label.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[185]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e82f5fadd8>

Tonal Centroid

In [310]:
# Inspect the full feature list of the first standardized per-company frame.
df_n_ps_std[0].columns
Out[310]:
Index(['durationfiles', 'rmsfiles', 'rmsmedianfiles', 'lowenergyfiles',
       'ASRfiles', 'beatspectrumfiles', 'eventdensityfiles', 'tempofiles',
       'pulseclarityfiles', 'zerocrossfiles', 'rolloffsfiles',
       'brightnessfiles', 'spreadfiles', 'centroidfiles', 'kurtosisfiles',
       'flatnessfiles', 'entropyfiles', 'mfccfiles_1', 'mfccfiles_2',
       'mfccfiles_3', 'mfccfiles_4', 'mfccfiles_5', 'mfccfiles_6',
       'mfccfiles_7', 'mfccfiles_8', 'mfccfiles_9', 'mfccfiles_10',
       'mfccfiles_11', 'mfccfiles_12', 'mfccfiles_13', 'inharmonicityfiles',
       'bestkeyfiles', 'keyclarityfiles', 'modalityfiles',
       'tonalcentroidfiles_1', 'tonalcentroidfiles_2', 'tonalcentroidfiles_3',
       'tonalcentroidfiles_4', 'tonalcentroidfiles_5', 'tonalcentroidfiles_6',
       'chromagramfiles_1', 'chromagramfiles_2', 'chromagramfiles_3',
       'chromagramfiles_4', 'chromagramfiles_5', 'chromagramfiles_6',
       'chromagramfiles_7', 'chromagramfiles_8', 'chromagramfiles_9',
       'chromagramfiles_10', 'chromagramfiles_11', 'chromagramfiles_12',
       'attackslopefiles', 'attackleapfiles', 'chosen'],
      dtype='object')
In [311]:
# Confirm that positional slice 34:40 covers exactly the six
# tonal-centroid columns before extracting them below.
df_n_ps_std[0].columns[34:40]
Out[311]:
Index(['tonalcentroidfiles_1', 'tonalcentroidfiles_2', 'tonalcentroidfiles_3',
       'tonalcentroidfiles_4', 'tonalcentroidfiles_5', 'tonalcentroidfiles_6'],
      dtype='object')
In [312]:
# Extract the six tonal-centroid features for every company.
# Selecting by column-name prefix is robust to column reordering, unlike
# the hard-coded positional slice [34:40] it replaces; the old second
# loop line (re-assigning the already-correct column names) was
# redundant and is dropped.
df_n_ps_std_tc = [None]*len(companies)
for i in range(len(companies)):
    df_n_ps_std_tc[i] = df_n_ps_std[i].filter(like='tonalcentroidfiles_').copy()
df_n_ps_std_tc[0].info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 372 entries, 0 to 371
Data columns (total 6 columns):
tonalcentroidfiles_1    372 non-null float64
tonalcentroidfiles_2    372 non-null float64
tonalcentroidfiles_3    372 non-null float64
tonalcentroidfiles_4    372 non-null float64
tonalcentroidfiles_5    372 non-null float64
tonalcentroidfiles_6    372 non-null float64
dtypes: float64(6)
memory usage: 17.5 KB

Arte Francés

ANN

In [313]:
# Feature matrix: tonal-centroid columns for company index 0
# (presumably "Arte Francés", per the section header — verify ordering).
X = df_n_ps_std_tc[0]
In [314]:
# Binary target: whether the song was chosen for the playlist.
y = df_n_ps[0]['chosen']
In [315]:
# Hold out a test set. Pinning random_state makes the split — and every
# metric computed downstream — reproducible across kernel restarts
# (previously the split changed on every Restart & Run All).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234)
In [316]:
# Sanity-check the training split size (rows, features).
X_train.shape
Out[316]:
(279, 6)
In [193]:
# Base estimator for the grid search; hidden_layer_sizes set here is
# overridden by the parameter grid below.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [194]:
# Candidate hyper-parameter values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but deliberately excluded from the grid below (kept for a future run).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [195]:
# Time the grid search so the cost of a re-run is known.
import time
start = time.time()  # current time in seconds since the Unix epoch (reference point)

np.random.seed(1234)  # seed NumPy so the stochastic parts are reproducible
# Hyper-parameter grid for the MLP; batch_size is left out (commented)
# to keep the search tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track both Cohen's kappa and accuracy; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): `iid` was deprecated in scikit-learn 0.22 and removed in
# 0.24 — this cell only runs on older versions; confirm before upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [196]:
# Run the exhaustive grid search (5-fold CV) and report the winning
# configuration with its mean CV accuracy and kappa, plus elapsed time.
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # time after model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (30, 30, 30), 'learning_rate_init': 0.02, 'max_iter': 2000}, que permiten obtener un Accuracy de 78.85% y un Kappa del 15.23
Tiempo total: 15.53 minutos
In [317]:
n0=X_train.shape[1]  # number of input features
# Force the architecture found by the earlier grid search; the value is
# written back into grid.best_params_ so any later cell reading it agrees.
grid.best_params_['hidden_layer_sizes'] = [30,30,30]
### hidden_layer_sizes
# Layer widths: the hidden sizes followed by a single output unit.
# (Replaces the old element-by-element copy loop with the idiomatic
# list() + concatenation.)
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]

lr = 0.02      # initial learning rate (the grid's best learning_rate_init)
epochs = 2000  # epoch budget (the grid's best max_iter)
In [318]:
# Functional-API input layer sized to the number of features.
input_tensor = Input(shape = (n0,))
In [319]:
# Chain the hidden Dense layers: each tanh layer feeds on the previous
# output, starting from the input tensor. The final width ns[-1] (= 1)
# becomes the sigmoid classification head.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation = 'tanh')(hidden_outputs[-1]))

classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [320]:
# Assemble the functional-API model and snapshot its freshly initialized
# weights so training can later be restarted from the same initialization.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [321]:
# Print the layer/parameter overview of the assembled network.
model.summary()
Model: "model_15"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_15 (InputLayer)        (None, 6)                 0         
_________________________________________________________________
dense_49 (Dense)             (None, 30)                210       
_________________________________________________________________
dense_50 (Dense)             (None, 30)                930       
_________________________________________________________________
dense_51 (Dense)             (None, 30)                930       
_________________________________________________________________
dense_52 (Dense)             (None, 1)                 31        
=================================================================
Total params: 2,101
Trainable params: 2,101
Non-trainable params: 0
_________________________________________________________________
In [322]:
# Reset to the saved initial weights, then train with Adam and binary
# cross-entropy, halving the learning rate whenever validation accuracy
# plateaus for 10 epochs (mode is inferred from the monitor name).
# NOTE(review): validation_data here is the TEST split — the test set is
# thus used for learning-rate scheduling; confirm this is intended.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test),
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 279 samples, validate on 93 samples
Epoch 1/2000
279/279 [==============================] - 0s 795us/step - loss: 0.6163 - accuracy: 0.6810 - val_loss: 0.6134 - val_accuracy: 0.7312
Epoch 2/2000
279/279 [==============================] - 0s 57us/step - loss: 0.5435 - accuracy: 0.7778 - val_loss: 0.6137 - val_accuracy: 0.7204
Epoch 3/2000
279/279 [==============================] - 0s 54us/step - loss: 0.5501 - accuracy: 0.7742 - val_loss: 0.6025 - val_accuracy: 0.7312
Epoch 4/2000
279/279 [==============================] - 0s 61us/step - loss: 0.5280 - accuracy: 0.7814 - val_loss: 0.5825 - val_accuracy: 0.7419
Epoch 5/2000
279/279 [==============================] - 0s 54us/step - loss: 0.5163 - accuracy: 0.7670 - val_loss: 0.5768 - val_accuracy: 0.7312
Epoch 6/2000
279/279 [==============================] - 0s 57us/step - loss: 0.4872 - accuracy: 0.7849 - val_loss: 0.5917 - val_accuracy: 0.7312
Epoch 7/2000
279/279 [==============================] - 0s 75us/step - loss: 0.4670 - accuracy: 0.7957 - val_loss: 0.5862 - val_accuracy: 0.7204
Epoch 8/2000
279/279 [==============================] - 0s 72us/step - loss: 0.4556 - accuracy: 0.7993 - val_loss: 0.6149 - val_accuracy: 0.7204
Epoch 9/2000
279/279 [==============================] - 0s 68us/step - loss: 0.4441 - accuracy: 0.8100 - val_loss: 0.5995 - val_accuracy: 0.7204
Epoch 10/2000
279/279 [==============================] - 0s 64us/step - loss: 0.4241 - accuracy: 0.8315 - val_loss: 0.6087 - val_accuracy: 0.6882
Epoch 11/2000
279/279 [==============================] - 0s 57us/step - loss: 0.4078 - accuracy: 0.8136 - val_loss: 0.6182 - val_accuracy: 0.6989
Epoch 12/2000
279/279 [==============================] - 0s 57us/step - loss: 0.3746 - accuracy: 0.8387 - val_loss: 0.6277 - val_accuracy: 0.6989
Epoch 13/2000
279/279 [==============================] - 0s 57us/step - loss: 0.3551 - accuracy: 0.8710 - val_loss: 0.7196 - val_accuracy: 0.7204
Epoch 14/2000
279/279 [==============================] - 0s 57us/step - loss: 0.3640 - accuracy: 0.8530 - val_loss: 0.6662 - val_accuracy: 0.6774

Epoch 00014: ReduceLROnPlateau reducing learning rate to 0.009999999776482582.
Epoch 15/2000
279/279 [==============================] - 0s 57us/step - loss: 0.2963 - accuracy: 0.8781 - val_loss: 0.6874 - val_accuracy: 0.6989
Epoch 16/2000
279/279 [==============================] - 0s 82us/step - loss: 0.2739 - accuracy: 0.8781 - val_loss: 0.7054 - val_accuracy: 0.6882
Epoch 17/2000
279/279 [==============================] - 0s 72us/step - loss: 0.2490 - accuracy: 0.9176 - val_loss: 0.6976 - val_accuracy: 0.6989
Epoch 18/2000
279/279 [==============================] - 0s 72us/step - loss: 0.2295 - accuracy: 0.9032 - val_loss: 0.7744 - val_accuracy: 0.6882
Epoch 19/2000
279/279 [==============================] - 0s 61us/step - loss: 0.2130 - accuracy: 0.9283 - val_loss: 0.7568 - val_accuracy: 0.6667
Epoch 20/2000
279/279 [==============================] - 0s 64us/step - loss: 0.1941 - accuracy: 0.9427 - val_loss: 0.8372 - val_accuracy: 0.6774
Epoch 21/2000
279/279 [==============================] - 0s 61us/step - loss: 0.1811 - accuracy: 0.9319 - val_loss: 0.8294 - val_accuracy: 0.6452
Epoch 22/2000
279/279 [==============================] - 0s 64us/step - loss: 0.1768 - accuracy: 0.9462 - val_loss: 0.9309 - val_accuracy: 0.6774
Epoch 23/2000
279/279 [==============================] - 0s 82us/step - loss: 0.1713 - accuracy: 0.9427 - val_loss: 0.8720 - val_accuracy: 0.6774
Epoch 24/2000
279/279 [==============================] - 0s 61us/step - loss: 0.1431 - accuracy: 0.9462 - val_loss: 0.9373 - val_accuracy: 0.6667

Epoch 00024: ReduceLROnPlateau reducing learning rate to 0.004999999888241291.
Epoch 25/2000
279/279 [==============================] - 0s 57us/step - loss: 0.1293 - accuracy: 0.9677 - val_loss: 0.9036 - val_accuracy: 0.6559
Epoch 26/2000
279/279 [==============================] - 0s 61us/step - loss: 0.1134 - accuracy: 0.9677 - val_loss: 0.9900 - val_accuracy: 0.6882
Epoch 27/2000
279/279 [==============================] - 0s 57us/step - loss: 0.1019 - accuracy: 0.9677 - val_loss: 0.9436 - val_accuracy: 0.6667
Epoch 28/2000
279/279 [==============================] - 0s 72us/step - loss: 0.1001 - accuracy: 0.9749 - val_loss: 0.9939 - val_accuracy: 0.6774
Epoch 29/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0948 - accuracy: 0.9677 - val_loss: 1.0398 - val_accuracy: 0.6989
Epoch 30/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0832 - accuracy: 0.9713 - val_loss: 1.0118 - val_accuracy: 0.6667
Epoch 31/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0772 - accuracy: 0.9821 - val_loss: 1.0549 - val_accuracy: 0.6882
Epoch 32/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0739 - accuracy: 0.9785 - val_loss: 1.0726 - val_accuracy: 0.6667
Epoch 33/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0687 - accuracy: 0.9892 - val_loss: 1.1151 - val_accuracy: 0.6774
Epoch 34/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0661 - accuracy: 0.9821 - val_loss: 1.1437 - val_accuracy: 0.6882

Epoch 00034: ReduceLROnPlateau reducing learning rate to 0.0024999999441206455.
Epoch 35/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0571 - accuracy: 0.9892 - val_loss: 1.1363 - val_accuracy: 0.6882
Epoch 36/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0568 - accuracy: 0.9928 - val_loss: 1.1436 - val_accuracy: 0.6989
Epoch 37/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0530 - accuracy: 0.9928 - val_loss: 1.1831 - val_accuracy: 0.7097
Epoch 38/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0504 - accuracy: 0.9892 - val_loss: 1.1861 - val_accuracy: 0.6882
Epoch 39/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0480 - accuracy: 0.9964 - val_loss: 1.2017 - val_accuracy: 0.6882
Epoch 40/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0463 - accuracy: 0.9964 - val_loss: 1.2189 - val_accuracy: 0.6989
Epoch 41/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0442 - accuracy: 0.9964 - val_loss: 1.2194 - val_accuracy: 0.6882
Epoch 42/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0428 - accuracy: 0.9964 - val_loss: 1.2455 - val_accuracy: 0.6989
Epoch 43/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0403 - accuracy: 0.9964 - val_loss: 1.2560 - val_accuracy: 0.6882
Epoch 44/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0390 - accuracy: 0.9964 - val_loss: 1.2598 - val_accuracy: 0.6774

Epoch 00044: ReduceLROnPlateau reducing learning rate to 0.0012499999720603228.
Epoch 45/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0362 - accuracy: 0.9964 - val_loss: 1.2756 - val_accuracy: 0.6989
Epoch 46/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0356 - accuracy: 0.9964 - val_loss: 1.2850 - val_accuracy: 0.6989
Epoch 47/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0347 - accuracy: 0.9964 - val_loss: 1.2884 - val_accuracy: 0.6989
Epoch 48/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0338 - accuracy: 0.9964 - val_loss: 1.2949 - val_accuracy: 0.6989
Epoch 49/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0331 - accuracy: 0.9964 - val_loss: 1.3005 - val_accuracy: 0.6989
Epoch 50/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0321 - accuracy: 1.0000 - val_loss: 1.3091 - val_accuracy: 0.6989
Epoch 51/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0313 - accuracy: 1.0000 - val_loss: 1.3175 - val_accuracy: 0.6989
Epoch 52/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0308 - accuracy: 1.0000 - val_loss: 1.3265 - val_accuracy: 0.6989
Epoch 53/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0304 - accuracy: 1.0000 - val_loss: 1.3245 - val_accuracy: 0.6989
Epoch 54/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0295 - accuracy: 1.0000 - val_loss: 1.3377 - val_accuracy: 0.6989

Epoch 00054: ReduceLROnPlateau reducing learning rate to 0.0006249999860301614.
Epoch 55/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0286 - accuracy: 1.0000 - val_loss: 1.3390 - val_accuracy: 0.6989
Epoch 56/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0283 - accuracy: 1.0000 - val_loss: 1.3404 - val_accuracy: 0.6989
Epoch 57/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0279 - accuracy: 1.0000 - val_loss: 1.3436 - val_accuracy: 0.6989
Epoch 58/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0276 - accuracy: 1.0000 - val_loss: 1.3453 - val_accuracy: 0.6989
Epoch 59/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0272 - accuracy: 1.0000 - val_loss: 1.3486 - val_accuracy: 0.6989
Epoch 60/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0268 - accuracy: 1.0000 - val_loss: 1.3547 - val_accuracy: 0.6989
Epoch 61/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0266 - accuracy: 1.0000 - val_loss: 1.3581 - val_accuracy: 0.6989
Epoch 62/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0263 - accuracy: 1.0000 - val_loss: 1.3635 - val_accuracy: 0.6989
Epoch 63/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0261 - accuracy: 1.0000 - val_loss: 1.3673 - val_accuracy: 0.7097
Epoch 64/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0257 - accuracy: 1.0000 - val_loss: 1.3713 - val_accuracy: 0.7097

Epoch 00064: ReduceLROnPlateau reducing learning rate to 0.0003124999930150807.
Epoch 65/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0253 - accuracy: 1.0000 - val_loss: 1.3727 - val_accuracy: 0.7097
Epoch 66/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0252 - accuracy: 1.0000 - val_loss: 1.3727 - val_accuracy: 0.6989
Epoch 67/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0251 - accuracy: 1.0000 - val_loss: 1.3723 - val_accuracy: 0.6989
Epoch 68/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0249 - accuracy: 1.0000 - val_loss: 1.3740 - val_accuracy: 0.6989
Epoch 69/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0247 - accuracy: 1.0000 - val_loss: 1.3777 - val_accuracy: 0.6989
Epoch 70/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0246 - accuracy: 1.0000 - val_loss: 1.3802 - val_accuracy: 0.6989
Epoch 71/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0244 - accuracy: 1.0000 - val_loss: 1.3823 - val_accuracy: 0.6989
Epoch 72/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0243 - accuracy: 1.0000 - val_loss: 1.3839 - val_accuracy: 0.6989
Epoch 73/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0241 - accuracy: 1.0000 - val_loss: 1.3851 - val_accuracy: 0.6989
Epoch 74/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0240 - accuracy: 1.0000 - val_loss: 1.3854 - val_accuracy: 0.6989

Epoch 00074: ReduceLROnPlateau reducing learning rate to 0.00015624999650754035.
Epoch 75/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0238 - accuracy: 1.0000 - val_loss: 1.3870 - val_accuracy: 0.6989
Epoch 76/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0237 - accuracy: 1.0000 - val_loss: 1.3878 - val_accuracy: 0.6989
Epoch 77/2000
279/279 [==============================] - ETA: 0s - loss: 0.0402 - accuracy: 1.00 - 0s 57us/step - loss: 0.0236 - accuracy: 1.0000 - val_loss: 1.3888 - val_accuracy: 0.6989
Epoch 78/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0236 - accuracy: 1.0000 - val_loss: 1.3901 - val_accuracy: 0.6989
Epoch 79/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0235 - accuracy: 1.0000 - val_loss: 1.3912 - val_accuracy: 0.6989
Epoch 80/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0234 - accuracy: 1.0000 - val_loss: 1.3928 - val_accuracy: 0.6989
Epoch 81/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0233 - accuracy: 1.0000 - val_loss: 1.3938 - val_accuracy: 0.6989
Epoch 82/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0233 - accuracy: 1.0000 - val_loss: 1.3943 - val_accuracy: 0.6989
Epoch 83/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0232 - accuracy: 1.0000 - val_loss: 1.3947 - val_accuracy: 0.6989
Epoch 84/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0232 - accuracy: 1.0000 - val_loss: 1.3944 - val_accuracy: 0.6989

Epoch 00084: ReduceLROnPlateau reducing learning rate to 7.812499825377017e-05.
Epoch 85/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0230 - accuracy: 1.0000 - val_loss: 1.3949 - val_accuracy: 0.6989
Epoch 86/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0230 - accuracy: 1.0000 - val_loss: 1.3958 - val_accuracy: 0.6989
Epoch 87/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0229 - accuracy: 1.0000 - val_loss: 1.3960 - val_accuracy: 0.6989
Epoch 88/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0229 - accuracy: 1.0000 - val_loss: 1.3967 - val_accuracy: 0.6989
Epoch 89/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0229 - accuracy: 1.0000 - val_loss: 1.3975 - val_accuracy: 0.6989
Epoch 90/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0228 - accuracy: 1.0000 - val_loss: 1.3982 - val_accuracy: 0.6989
Epoch 91/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0228 - accuracy: 1.0000 - val_loss: 1.3990 - val_accuracy: 0.6989
Epoch 92/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0228 - accuracy: 1.0000 - val_loss: 1.3999 - val_accuracy: 0.6989
Epoch 93/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0227 - accuracy: 1.0000 - val_loss: 1.4000 - val_accuracy: 0.6989
Epoch 94/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0227 - accuracy: 1.0000 - val_loss: 1.4007 - val_accuracy: 0.6989

Epoch 00094: ReduceLROnPlateau reducing learning rate to 3.9062499126885086e-05.
Epoch 95/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0226 - accuracy: 1.0000 - val_loss: 1.4009 - val_accuracy: 0.6989
Epoch 96/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0226 - accuracy: 1.0000 - val_loss: 1.4010 - val_accuracy: 0.6989
Epoch 97/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0226 - accuracy: 1.0000 - val_loss: 1.4014 - val_accuracy: 0.6989
Epoch 98/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0226 - accuracy: 1.0000 - val_loss: 1.4017 - val_accuracy: 0.6989
Epoch 99/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0226 - accuracy: 1.0000 - val_loss: 1.4018 - val_accuracy: 0.6989
Epoch 100/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0225 - accuracy: 1.0000 - val_loss: 1.4022 - val_accuracy: 0.6989
Epoch 101/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0225 - accuracy: 1.0000 - val_loss: 1.4023 - val_accuracy: 0.6989
Epoch 102/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0225 - accuracy: 1.0000 - val_loss: 1.4027 - val_accuracy: 0.6989
Epoch 103/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0225 - accuracy: 1.0000 - val_loss: 1.4029 - val_accuracy: 0.6989
Epoch 104/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4029 - val_accuracy: 0.6989

Epoch 00104: ReduceLROnPlateau reducing learning rate to 1.9531249563442543e-05.
Epoch 105/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4032 - val_accuracy: 0.6989
Epoch 106/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4033 - val_accuracy: 0.6989
Epoch 107/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4035 - val_accuracy: 0.6989
Epoch 108/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4036 - val_accuracy: 0.6989
Epoch 109/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4038 - val_accuracy: 0.6989
Epoch 110/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4038 - val_accuracy: 0.6989
Epoch 111/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4039 - val_accuracy: 0.6989
Epoch 112/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0224 - accuracy: 1.0000 - val_loss: 1.4041 - val_accuracy: 0.6989
Epoch 113/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4042 - val_accuracy: 0.6989
Epoch 114/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4043 - val_accuracy: 0.6989

Epoch 00114: ReduceLROnPlateau reducing learning rate to 9.765624781721272e-06.
Epoch 115/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4044 - val_accuracy: 0.6989
Epoch 116/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4045 - val_accuracy: 0.6989
Epoch 117/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4045 - val_accuracy: 0.6989
Epoch 118/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4046 - val_accuracy: 0.6989
Epoch 119/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4047 - val_accuracy: 0.6989
Epoch 120/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4048 - val_accuracy: 0.6989
Epoch 121/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4048 - val_accuracy: 0.6989
Epoch 122/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4049 - val_accuracy: 0.6989
Epoch 123/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4049 - val_accuracy: 0.6989
Epoch 124/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4050 - val_accuracy: 0.6989

Epoch 00124: ReduceLROnPlateau reducing learning rate to 4.882812390860636e-06.
Epoch 125/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4050 - val_accuracy: 0.6989
Epoch 126/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4051 - val_accuracy: 0.6989
Epoch 127/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4051 - val_accuracy: 0.6989
Epoch 128/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4052 - val_accuracy: 0.6989
Epoch 129/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4052 - val_accuracy: 0.6989
Epoch 130/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4052 - val_accuracy: 0.6989
Epoch 131/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0223 - accuracy: 1.0000 - val_loss: 1.4053 - val_accuracy: 0.6989
Epoch 132/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4053 - val_accuracy: 0.6989
Epoch 133/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4054 - val_accuracy: 0.6989
Epoch 134/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4054 - val_accuracy: 0.6989

Epoch 00134: ReduceLROnPlateau reducing learning rate to 2.441406195430318e-06.
Epoch 135/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4054 - val_accuracy: 0.6989
Epoch 136/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4054 - val_accuracy: 0.6989
Epoch 137/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4055 - val_accuracy: 0.6989
Epoch 138/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4055 - val_accuracy: 0.6989
Epoch 139/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4055 - val_accuracy: 0.6989
Epoch 140/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4055 - val_accuracy: 0.6989
Epoch 141/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4056 - val_accuracy: 0.6989
Epoch 142/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4056 - val_accuracy: 0.6989
Epoch 143/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4056 - val_accuracy: 0.6989
Epoch 144/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4056 - val_accuracy: 0.6989

Epoch 00144: ReduceLROnPlateau reducing learning rate to 1.220703097715159e-06.
Epoch 145/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4056 - val_accuracy: 0.6989
Epoch 146/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 147/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 148/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 149/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 150/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 151/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 152/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 153/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 154/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989

Epoch 00154: ReduceLROnPlateau reducing learning rate to 6.103515488575795e-07.
Epoch 155/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 156/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4057 - val_accuracy: 0.6989
Epoch 157/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 158/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 159/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 160/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 161/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 162/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 163/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 164/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00164: ReduceLROnPlateau reducing learning rate to 3.0517577442878974e-07.
Epoch 165/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 166/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 167/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 168/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 169/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 170/2000
279/279 [==============================] - ETA: 0s - loss: 0.0231 - accuracy: 1.00 - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 171/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 172/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 173/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 174/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00174: ReduceLROnPlateau reducing learning rate to 1.5258788721439487e-07.
Epoch 175/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 176/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 177/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 178/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 179/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 180/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 181/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 182/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 183/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 184/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00184: ReduceLROnPlateau reducing learning rate to 7.629394360719743e-08.
Epoch 185/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 186/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 187/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 188/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 189/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 190/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 191/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 192/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 193/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 194/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00194: ReduceLROnPlateau reducing learning rate to 3.814697180359872e-08.
Epoch 195/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 196/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 197/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 198/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 199/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 200/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 201/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 202/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 203/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 204/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00204: ReduceLROnPlateau reducing learning rate to 1.907348590179936e-08.
Epoch 205/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 206/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 207/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 208/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 209/2000
279/279 [==============================] - ETA: 0s - loss: 0.0149 - accuracy: 1.00 - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 210/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 211/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 212/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 213/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 214/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00214: ReduceLROnPlateau reducing learning rate to 9.53674295089968e-09.
Epoch 215/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 216/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 217/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 218/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 219/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 220/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 221/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 222/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 223/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 224/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00224: ReduceLROnPlateau reducing learning rate to 4.76837147544984e-09.
Epoch 225/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 226/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 227/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 228/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 229/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 230/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 231/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 232/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 233/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 234/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00234: ReduceLROnPlateau reducing learning rate to 2.38418573772492e-09.
Epoch 235/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 236/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 237/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 238/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 239/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 240/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 241/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 242/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 243/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 244/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00244: ReduceLROnPlateau reducing learning rate to 1.19209286886246e-09.
Epoch 245/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 246/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 247/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 248/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 249/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 250/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 251/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 252/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 253/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 254/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00254: ReduceLROnPlateau reducing learning rate to 5.9604643443123e-10.
Epoch 255/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 256/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 257/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 258/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 259/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 260/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 261/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 262/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 263/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 264/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00264: ReduceLROnPlateau reducing learning rate to 2.98023217215615e-10.
Epoch 265/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 266/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 267/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 268/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 269/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 270/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 271/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 272/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 273/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 274/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00274: ReduceLROnPlateau reducing learning rate to 1.490116086078075e-10.
Epoch 275/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 276/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 277/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 278/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 279/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 280/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 281/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 282/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 283/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 284/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00284: ReduceLROnPlateau reducing learning rate to 7.450580430390374e-11.
Epoch 285/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 286/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 287/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 288/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 289/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 290/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 291/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 292/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 293/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 294/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00294: ReduceLROnPlateau reducing learning rate to 3.725290215195187e-11.
Epoch 295/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 296/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 297/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 298/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 299/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 300/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 301/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 302/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 303/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 304/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00304: ReduceLROnPlateau reducing learning rate to 1.8626451075975936e-11.
Epoch 305/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 306/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 307/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 308/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 309/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 310/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 311/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 312/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 313/2000
279/279 [==============================] - ETA: 0s - loss: 0.0208 - accuracy: 1.00 - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 314/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00314: ReduceLROnPlateau reducing learning rate to 9.313225537987968e-12.
Epoch 315/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 316/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 317/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 318/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 319/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 320/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 321/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 322/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 323/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 324/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00324: ReduceLROnPlateau reducing learning rate to 4.656612768993984e-12.
Epoch 325/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 326/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 327/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 328/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 329/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 330/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 331/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 332/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 333/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 334/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00334: ReduceLROnPlateau reducing learning rate to 2.328306384496992e-12.
Epoch 335/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 336/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 337/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 338/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 339/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 340/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 341/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 342/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 343/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 344/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00344: ReduceLROnPlateau reducing learning rate to 1.164153192248496e-12.
Epoch 345/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 346/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 347/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 348/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 349/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 350/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 351/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 352/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 353/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 354/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00354: ReduceLROnPlateau reducing learning rate to 5.82076596124248e-13.
Epoch 355/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 356/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 357/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 358/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 359/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 360/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 361/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 362/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 363/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 364/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00364: ReduceLROnPlateau reducing learning rate to 2.91038298062124e-13.
Epoch 365/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 366/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 367/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 368/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 369/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 370/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 371/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 372/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 373/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 374/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00374: ReduceLROnPlateau reducing learning rate to 1.45519149031062e-13.
Epoch 375/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 376/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 377/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 378/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 379/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 380/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 381/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 382/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 383/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 384/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00384: ReduceLROnPlateau reducing learning rate to 7.2759574515531e-14.
Epoch 385/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 386/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 387/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 388/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 389/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 390/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 391/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 392/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 393/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 394/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00394: ReduceLROnPlateau reducing learning rate to 3.63797872577655e-14.
Epoch 395/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 396/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 397/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 398/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 399/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 400/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 401/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 402/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 403/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 404/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00404: ReduceLROnPlateau reducing learning rate to 1.818989362888275e-14.
Epoch 405/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 406/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 407/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 408/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 409/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 410/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 411/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 412/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 413/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 414/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00414: ReduceLROnPlateau reducing learning rate to 9.094946814441375e-15.
Epoch 415/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 416/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 417/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 418/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 419/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 420/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 421/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 422/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 423/2000
279/279 [==============================] - ETA: 0s - loss: 0.0257 - accuracy: 1.00 - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 424/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00424: ReduceLROnPlateau reducing learning rate to 4.5474734072206875e-15.
Epoch 425/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 426/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 427/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 428/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 429/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 430/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 431/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 432/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 433/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 434/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00434: ReduceLROnPlateau reducing learning rate to 2.2737367036103438e-15.
Epoch 435/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 436/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 437/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 438/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 439/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 440/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 441/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 442/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 443/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 444/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00444: ReduceLROnPlateau reducing learning rate to 1.1368683518051719e-15.
Epoch 445/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 446/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 447/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 448/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 449/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 450/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 451/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 452/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 453/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 454/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00454: ReduceLROnPlateau reducing learning rate to 5.684341759025859e-16.
Epoch 455/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 456/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 457/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 458/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 459/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 460/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 461/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 462/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 463/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 464/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00464: ReduceLROnPlateau reducing learning rate to 2.8421708795129297e-16.
Epoch 465/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 466/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 467/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 468/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 469/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 470/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 471/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 472/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 473/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 474/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00474: ReduceLROnPlateau reducing learning rate to 1.4210854397564648e-16.
Epoch 475/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 476/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 477/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 478/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 479/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 480/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 481/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 482/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 483/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 484/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00484: ReduceLROnPlateau reducing learning rate to 7.105427198782324e-17.
Epoch 485/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 486/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 487/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 488/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 489/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 490/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 491/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 492/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 493/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 494/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00494: ReduceLROnPlateau reducing learning rate to 3.552713599391162e-17.
Epoch 495/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 496/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 497/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 498/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 499/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 500/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 501/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 502/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 503/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 504/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00504: ReduceLROnPlateau reducing learning rate to 1.776356799695581e-17.
Epoch 505/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 506/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 507/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 508/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 509/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 510/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 511/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 512/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 513/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 514/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00514: ReduceLROnPlateau reducing learning rate to 8.881783998477905e-18.
Epoch 515/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 516/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 517/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 518/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 519/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 520/2000
279/279 [==============================] - 0s 50us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 521/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 522/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 523/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 524/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00524: ReduceLROnPlateau reducing learning rate to 4.440891999238953e-18.
Epoch 525/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 526/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 527/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 528/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 529/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 530/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 531/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 532/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 533/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 534/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00534: ReduceLROnPlateau reducing learning rate to 2.2204459996194763e-18.
Epoch 535/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 536/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 537/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 538/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 539/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 540/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 541/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 542/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 543/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 544/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00544: ReduceLROnPlateau reducing learning rate to 1.1102229998097382e-18.
Epoch 545/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 546/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 547/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 548/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 549/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 550/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 551/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 552/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 553/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 554/2000
279/279 [==============================] - 0s 54us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00554: ReduceLROnPlateau reducing learning rate to 5.551114999048691e-19.
Epoch 555/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 556/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 557/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 558/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 559/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 560/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 561/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 562/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 563/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 564/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00564: ReduceLROnPlateau reducing learning rate to 2.7755574995243454e-19.
Epoch 565/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 566/2000
279/279 [==============================] - 0s 64us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 567/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 568/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 569/2000
279/279 [==============================] - 0s 57us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 570/2000
279/279 [==============================] - 0s 68us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 571/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 572/2000
279/279 [==============================] - 0s 61us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 573/2000
279/279 [==============================] - 0s 143us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 574/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00574: ReduceLROnPlateau reducing learning rate to 1.3877787497621727e-19.
Epoch 575/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 576/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 577/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 578/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 579/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 580/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 581/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 582/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 583/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 584/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00584: ReduceLROnPlateau reducing learning rate to 6.938893748810864e-20.
Epoch 585/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 586/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 587/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 588/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 589/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 590/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 591/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 592/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 593/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 594/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00594: ReduceLROnPlateau reducing learning rate to 3.469446874405432e-20.
Epoch 595/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 596/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 597/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 598/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 599/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 600/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 601/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 602/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 603/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 604/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00604: ReduceLROnPlateau reducing learning rate to 1.734723437202716e-20.
Epoch 605/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 606/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 607/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 608/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 609/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 610/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 611/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 612/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 613/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 614/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00614: ReduceLROnPlateau reducing learning rate to 8.67361718601358e-21.
Epoch 615/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 616/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 617/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 618/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 619/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 620/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 621/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 622/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 623/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 624/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00624: ReduceLROnPlateau reducing learning rate to 4.33680859300679e-21.
Epoch 625/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 626/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 627/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 628/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 629/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 630/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 631/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 632/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 633/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 634/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00634: ReduceLROnPlateau reducing learning rate to 2.168404296503395e-21.
Epoch 635/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 636/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 637/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 638/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 639/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 640/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 641/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 642/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 643/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 644/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00644: ReduceLROnPlateau reducing learning rate to 1.0842021482516974e-21.
Epoch 645/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 646/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 647/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 648/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 649/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 650/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 651/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 652/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 653/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 654/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00654: ReduceLROnPlateau reducing learning rate to 5.421010741258487e-22.
Epoch 655/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 656/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 657/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 658/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 659/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 660/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 661/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 662/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 663/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 664/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00664: ReduceLROnPlateau reducing learning rate to 2.7105053706292436e-22.
Epoch 665/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 666/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 667/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 668/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 669/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 670/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 671/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 672/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 673/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 674/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00674: ReduceLROnPlateau reducing learning rate to 1.3552526853146218e-22.
Epoch 675/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 676/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 677/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 678/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 679/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 680/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 681/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 682/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 683/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 684/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00684: ReduceLROnPlateau reducing learning rate to 6.776263426573109e-23.
Epoch 685/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 686/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 687/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 688/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 689/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 690/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 691/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 692/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 693/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 694/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00694: ReduceLROnPlateau reducing learning rate to 3.3881317132865545e-23.
Epoch 695/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 696/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 697/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 698/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 699/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 700/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 701/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 702/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 703/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 704/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00704: ReduceLROnPlateau reducing learning rate to 1.6940658566432772e-23.
Epoch 705/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 706/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 707/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 708/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 709/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 710/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 711/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 712/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 713/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 714/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00714: ReduceLROnPlateau reducing learning rate to 8.470329283216386e-24.
Epoch 715/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 716/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 717/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 718/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 719/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 720/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 721/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 722/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 723/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 724/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00724: ReduceLROnPlateau reducing learning rate to 4.235164641608193e-24.
Epoch 725/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 726/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 727/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 728/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 729/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 730/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 731/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 732/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 733/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 734/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00734: ReduceLROnPlateau reducing learning rate to 2.1175823208040965e-24.
Epoch 735/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 736/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 737/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 738/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 739/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 740/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 741/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 742/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 743/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 744/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00744: ReduceLROnPlateau reducing learning rate to 1.0587911604020483e-24.
Epoch 745/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 746/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 747/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 748/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 749/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 750/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 751/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 752/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 753/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 754/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00754: ReduceLROnPlateau reducing learning rate to 5.293955802010241e-25.
Epoch 755/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 756/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 757/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 758/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 759/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 760/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 761/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 762/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 763/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 764/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00764: ReduceLROnPlateau reducing learning rate to 2.6469779010051207e-25.
Epoch 765/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 766/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 767/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 768/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 769/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 770/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 771/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 772/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 773/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 774/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00774: ReduceLROnPlateau reducing learning rate to 1.3234889505025603e-25.
Epoch 775/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 776/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 777/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 778/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 779/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 780/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 781/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 782/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 783/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 784/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00784: ReduceLROnPlateau reducing learning rate to 6.617444752512802e-26.
Epoch 785/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 786/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 787/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 788/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 789/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 790/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 791/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 792/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 793/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 794/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00794: ReduceLROnPlateau reducing learning rate to 3.308722376256401e-26.
Epoch 795/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 796/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 797/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 798/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 799/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 800/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 801/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 802/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 803/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 804/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00804: ReduceLROnPlateau reducing learning rate to 1.6543611881282004e-26.
Epoch 805/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 806/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 807/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 808/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 809/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 810/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 811/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 812/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 813/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 814/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00814: ReduceLROnPlateau reducing learning rate to 8.271805940641002e-27.
Epoch 815/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 816/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 817/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 818/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 819/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 820/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 821/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 822/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 823/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 824/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00824: ReduceLROnPlateau reducing learning rate to 4.135902970320501e-27.
Epoch 825/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 826/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 827/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 828/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 829/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 830/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 831/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 832/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 833/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 834/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00834: ReduceLROnPlateau reducing learning rate to 2.0679514851602505e-27.
Epoch 835/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 836/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 837/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 838/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 839/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 840/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 841/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 842/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 843/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 844/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00844: ReduceLROnPlateau reducing learning rate to 1.0339757425801253e-27.
Epoch 845/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 846/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 847/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 848/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 849/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 850/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 851/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 852/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 853/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 854/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00854: ReduceLROnPlateau reducing learning rate to 5.169878712900626e-28.
Epoch 855/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 856/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 857/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 858/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 859/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 860/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 861/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 862/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 863/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 864/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00864: ReduceLROnPlateau reducing learning rate to 2.584939356450313e-28.
Epoch 865/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 866/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 867/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 868/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 869/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 870/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 871/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 872/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 873/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 874/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00874: ReduceLROnPlateau reducing learning rate to 1.2924696782251566e-28.
Epoch 875/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 876/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 877/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 878/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 879/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 880/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 881/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 882/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 883/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 884/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00884: ReduceLROnPlateau reducing learning rate to 6.462348391125783e-29.
Epoch 885/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 886/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 887/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 888/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 889/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 890/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 891/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 892/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 893/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 894/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00894: ReduceLROnPlateau reducing learning rate to 3.2311741955628914e-29.
Epoch 895/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 896/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 897/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 898/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 899/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 900/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 901/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 902/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 903/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 904/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00904: ReduceLROnPlateau reducing learning rate to 1.6155870977814457e-29.
Epoch 905/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 906/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 907/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 908/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 909/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 910/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 911/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 912/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 913/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 914/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00914: ReduceLROnPlateau reducing learning rate to 8.077935488907229e-30.
Epoch 915/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 916/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 917/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 918/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 919/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 920/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 921/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 922/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 923/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 924/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00924: ReduceLROnPlateau reducing learning rate to 4.038967744453614e-30.
Epoch 925/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 926/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 927/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 928/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 929/2000
279/279 [==============================] - 0s 129us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 930/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 931/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 932/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 933/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 934/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00934: ReduceLROnPlateau reducing learning rate to 2.019483872226807e-30.
Epoch 935/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 936/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 937/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 938/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 939/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 940/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 941/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 942/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 943/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 944/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00944: ReduceLROnPlateau reducing learning rate to 1.0097419361134036e-30.
Epoch 945/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 946/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 947/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 948/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 949/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 950/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 951/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 952/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 953/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 954/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00954: ReduceLROnPlateau reducing learning rate to 5.048709680567018e-31.
Epoch 955/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 956/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 957/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 958/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 959/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 960/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 961/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 962/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 963/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 964/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00964: ReduceLROnPlateau reducing learning rate to 2.524354840283509e-31.
Epoch 965/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 966/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 967/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 968/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 969/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 970/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 971/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 972/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 973/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 974/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00974: ReduceLROnPlateau reducing learning rate to 1.2621774201417545e-31.
Epoch 975/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 976/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 977/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 978/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 979/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 980/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 981/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 982/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 983/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 984/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00984: ReduceLROnPlateau reducing learning rate to 6.310887100708772e-32.
Epoch 985/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 986/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 987/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 988/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 989/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 990/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 991/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 992/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 993/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 994/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 00994: ReduceLROnPlateau reducing learning rate to 3.155443550354386e-32.
Epoch 995/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 996/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 997/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 998/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 999/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1000/2000
279/279 [==============================] - 0s 129us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1001/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1002/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1003/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1004/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01004: ReduceLROnPlateau reducing learning rate to 1.577721775177193e-32.
Epoch 1005/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1006/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1007/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1008/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1009/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1010/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1011/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1012/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1013/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1014/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01014: ReduceLROnPlateau reducing learning rate to 7.888608875885965e-33.
Epoch 1015/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1016/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1017/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1018/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1019/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1020/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1021/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1022/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1023/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1024/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01024: ReduceLROnPlateau reducing learning rate to 3.944304437942983e-33.
Epoch 1025/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1026/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1027/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1028/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1029/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1030/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1031/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1032/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1033/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1034/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01034: ReduceLROnPlateau reducing learning rate to 1.9721522189714914e-33.
Epoch 1035/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1036/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1037/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1038/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1039/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1040/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1041/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1042/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1043/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1044/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01044: ReduceLROnPlateau reducing learning rate to 9.860761094857457e-34.
Epoch 1045/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1046/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1047/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1048/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1049/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1050/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1051/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1052/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1053/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1054/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01054: ReduceLROnPlateau reducing learning rate to 4.930380547428728e-34.
Epoch 1055/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1056/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1057/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1058/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1059/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1060/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1061/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1062/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1063/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1064/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01064: ReduceLROnPlateau reducing learning rate to 2.465190273714364e-34.
Epoch 1065/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1066/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1067/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1068/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1069/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1070/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1071/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1072/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1073/2000
279/279 [==============================] - ETA: 0s - loss: 0.0214 - accuracy: 1.00 - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1074/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01074: ReduceLROnPlateau reducing learning rate to 1.232595136857182e-34.
Epoch 1075/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1076/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1077/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1078/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1079/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1080/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1081/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1082/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1083/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1084/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01084: ReduceLROnPlateau reducing learning rate to 6.16297568428591e-35.
Epoch 1085/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1086/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1087/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1088/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1089/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1090/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1091/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1092/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1093/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1094/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01094: ReduceLROnPlateau reducing learning rate to 3.081487842142955e-35.
Epoch 1095/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1096/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1097/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1098/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1099/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1100/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1101/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1102/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1103/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1104/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01104: ReduceLROnPlateau reducing learning rate to 1.5407439210714776e-35.
Epoch 1105/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1106/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1107/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1108/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1109/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1110/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1111/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1112/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1113/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1114/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01114: ReduceLROnPlateau reducing learning rate to 7.703719605357388e-36.
Epoch 1115/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1116/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1117/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1118/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1119/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1120/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1121/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1122/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1123/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1124/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01124: ReduceLROnPlateau reducing learning rate to 3.851859802678694e-36.
Epoch 1125/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1126/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1127/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1128/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1129/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1130/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1131/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1132/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1133/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1134/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01134: ReduceLROnPlateau reducing learning rate to 1.925929901339347e-36.
Epoch 1135/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1136/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1137/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1138/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1139/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1140/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1141/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1142/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1143/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1144/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01144: ReduceLROnPlateau reducing learning rate to 9.629649506696735e-37.
Epoch 1145/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1146/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1147/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1148/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1149/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1150/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1151/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1152/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1153/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1154/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01154: ReduceLROnPlateau reducing learning rate to 4.8148247533483676e-37.
Epoch 1155/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1156/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1157/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1158/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1159/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1160/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1161/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1162/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1163/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1164/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01164: ReduceLROnPlateau reducing learning rate to 2.4074123766741838e-37.
Epoch 1165/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1166/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1167/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1168/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1169/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1170/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1171/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1172/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1173/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1174/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01174: ReduceLROnPlateau reducing learning rate to 1.2037061883370919e-37.
Epoch 1175/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1176/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1177/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1178/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1179/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1180/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1181/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1182/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1183/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1184/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01184: ReduceLROnPlateau reducing learning rate to 6.018530941685459e-38.
Epoch 1185/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1186/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1187/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1188/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1189/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1190/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1191/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1192/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1193/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1194/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01194: ReduceLROnPlateau reducing learning rate to 3.0092654708427297e-38.
Epoch 1195/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1196/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1197/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1198/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1199/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1200/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1201/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1202/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1203/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1204/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01204: ReduceLROnPlateau reducing learning rate to 1.5046327354213649e-38.
Epoch 1205/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1206/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1207/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1208/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1209/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1210/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1211/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1212/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1213/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1214/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01214: ReduceLROnPlateau reducing learning rate to 7.523163677106824e-39.
Epoch 1215/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1216/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1217/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1218/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1219/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1220/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1221/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1222/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1223/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1224/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01224: ReduceLROnPlateau reducing learning rate to 3.761581838553412e-39.
Epoch 1225/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1226/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1227/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1228/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1229/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1230/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1231/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1232/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1233/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1234/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01234: ReduceLROnPlateau reducing learning rate to 1.88079056895209e-39.
Epoch 1235/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1236/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1237/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1238/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1239/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1240/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1241/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1242/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1243/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1244/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01244: ReduceLROnPlateau reducing learning rate to 9.40395284476045e-40.
Epoch 1245/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1246/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1247/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1248/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1249/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1250/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1251/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1252/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1253/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1254/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01254: ReduceLROnPlateau reducing learning rate to 4.701972919134064e-40.
Epoch 1255/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1256/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1257/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1258/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1259/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1260/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1261/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1262/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1263/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1264/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01264: ReduceLROnPlateau reducing learning rate to 2.350986459567032e-40.
Epoch 1265/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1266/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1267/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1268/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1269/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1270/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1271/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1272/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1273/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1274/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01274: ReduceLROnPlateau reducing learning rate to 1.175493229783516e-40.
Epoch 1275/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1276/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1277/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1278/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1279/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1280/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1281/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1282/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1283/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1284/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01284: ReduceLROnPlateau reducing learning rate to 5.87746614891758e-41.
Epoch 1285/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1286/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1287/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1288/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1289/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1290/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1291/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1292/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1293/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1294/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01294: ReduceLROnPlateau reducing learning rate to 2.93873307445879e-41.
Epoch 1295/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1296/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1297/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1298/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1299/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1300/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1301/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1302/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1303/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1304/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01304: ReduceLROnPlateau reducing learning rate to 1.4694015696910032e-41.
Epoch 1305/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1306/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1307/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1308/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1309/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1310/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1311/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1312/2000
279/279 [==============================] - 0s 72us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1313/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1314/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01314: ReduceLROnPlateau reducing learning rate to 7.347007848455016e-42.
Epoch 1315/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1316/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1317/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1318/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1319/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1320/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1321/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1322/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1323/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1324/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01324: ReduceLROnPlateau reducing learning rate to 3.673503924227508e-42.
Epoch 1325/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1326/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1327/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1328/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1329/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1330/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1331/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1332/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1333/2000
279/279 [==============================] - 0s 129us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1334/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01334: ReduceLROnPlateau reducing learning rate to 1.8371022867298352e-42.
Epoch 1335/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1336/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1337/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1338/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1339/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1340/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1341/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1342/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1343/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1344/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01344: ReduceLROnPlateau reducing learning rate to 9.185511433649176e-43.
Epoch 1345/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1346/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1347/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1348/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1349/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1350/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1351/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1352/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1353/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1354/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01354: ReduceLROnPlateau reducing learning rate to 4.5962589629854e-43.
Epoch 1355/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1356/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1357/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1358/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1359/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1360/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1361/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1362/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1363/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1364/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01364: ReduceLROnPlateau reducing learning rate to 2.2981294814927e-43.
Epoch 1365/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1366/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1367/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1368/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1369/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1370/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1371/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1372/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1373/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1374/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01374: ReduceLROnPlateau reducing learning rate to 1.14906474074635e-43.
Epoch 1375/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1376/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1377/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1378/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1379/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1380/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1381/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1382/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1383/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1384/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01384: ReduceLROnPlateau reducing learning rate to 5.74532370373175e-44.
Epoch 1385/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1386/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1387/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1388/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1389/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1390/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1391/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1392/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1393/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1394/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01394: ReduceLROnPlateau reducing learning rate to 2.872661851865875e-44.
Epoch 1395/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1396/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1397/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1398/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1399/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1400/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1401/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1402/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1403/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1404/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01404: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-44.
Epoch 1405/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1406/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1407/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1408/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1409/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1410/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1411/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1412/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1413/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1414/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01414: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-45.
Epoch 1415/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1416/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1417/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1418/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1419/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1420/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1421/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1422/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1423/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1424/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01424: ReduceLROnPlateau reducing learning rate to 3.5032461608120427e-45.
Epoch 1425/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1426/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1427/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1428/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1429/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1430/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1431/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1432/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1433/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1434/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01434: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-45.
Epoch 1435/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1436/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1437/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1438/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1439/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1440/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1441/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1442/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1443/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1444/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989

Epoch 01444: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-46.
Epoch 1445/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1446/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1447/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1448/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1449/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1450/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1451/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1452/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1453/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1454/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1455/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1456/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1457/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1458/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1459/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1460/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1461/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1462/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1463/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1464/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1465/2000
279/279 [==============================] - ETA: 0s - loss: 0.0195 - accuracy: 1.00 - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1466/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1467/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1468/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1469/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1470/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1471/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1472/2000
279/279 [==============================] - 0s 133us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1473/2000
279/279 [==============================] - 0s 136us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1474/2000
279/279 [==============================] - 0s 136us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1475/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1476/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1477/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1478/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1479/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1480/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1481/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1482/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1483/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1484/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1485/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1486/2000
279/279 [==============================] - 0s 129us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1487/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1488/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1489/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1490/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1491/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1492/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1493/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1494/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1495/2000
279/279 [==============================] - 0s 154us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1496/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1497/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1498/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1499/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1500/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1501/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1502/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1503/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1504/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1505/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1506/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1507/2000
279/279 [==============================] - 0s 133us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1508/2000
279/279 [==============================] - 0s 140us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1509/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1510/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1511/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1512/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1513/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1514/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1515/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1516/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1517/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1518/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1519/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1520/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1521/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1522/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1523/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1524/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1525/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1526/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1527/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1528/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1529/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1530/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1531/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1532/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1533/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1534/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1535/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1536/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1537/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1538/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1539/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1540/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1541/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1542/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1543/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1544/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1545/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1546/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1547/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1548/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1549/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1550/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1551/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1552/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1553/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1554/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1555/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1556/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1557/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1558/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1559/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1560/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1561/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1562/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1563/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1564/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1565/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1566/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1567/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1568/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1569/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1570/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1571/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1572/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1573/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1574/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1575/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1576/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1577/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1578/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1579/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1580/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1581/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1582/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1583/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1584/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1585/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1586/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1587/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1588/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1589/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1590/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1591/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1592/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1593/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1594/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1595/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1596/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1597/2000
279/279 [==============================] - 0s 111us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1598/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1599/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1600/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1601/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1602/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1603/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1604/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1605/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1606/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1607/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1608/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1609/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1610/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1611/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1612/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1613/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1614/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1615/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1616/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1617/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1618/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1619/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1620/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1621/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1622/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1623/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1624/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1625/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1626/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1627/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1628/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1629/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1630/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1631/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1632/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1633/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1634/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1635/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1636/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1637/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1638/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1639/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1640/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1641/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1642/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1643/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1644/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1645/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1646/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1647/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1648/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1649/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1650/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1651/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1652/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1653/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1654/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1655/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1656/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1657/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1658/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1659/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1660/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1661/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1662/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1663/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1664/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1665/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1666/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1667/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1668/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1669/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1670/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1671/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1672/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1673/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1674/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1675/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1676/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1677/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1678/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1679/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1680/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1681/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1682/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1683/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1684/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1685/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1686/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1687/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1688/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1689/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1690/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1691/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1692/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1693/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1694/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1695/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1696/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1697/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1698/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1699/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1700/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1701/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1702/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1703/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1704/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1705/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1706/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1707/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1708/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1709/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1710/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1711/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1712/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1713/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1714/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1715/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1716/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1717/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1718/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1719/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1720/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1721/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1722/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1723/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1724/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1725/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1726/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1727/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1728/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1729/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1730/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1731/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1732/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1733/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1734/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1735/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1736/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1737/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1738/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1739/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1740/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1741/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1742/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1743/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1744/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1745/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1746/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1747/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1748/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1749/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1750/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1751/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1752/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1753/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1754/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1755/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1756/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1757/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1758/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1759/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1760/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1761/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1762/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1763/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1764/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1765/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1766/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1767/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1768/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1769/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1770/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1771/2000
279/279 [==============================] - 0s 122us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1772/2000
279/279 [==============================] - 0s 143us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1773/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1774/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1775/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1776/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1777/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1778/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1779/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1780/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1781/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1782/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1783/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1784/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1785/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1786/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1787/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1788/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1789/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1790/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1791/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1792/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1793/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1794/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1795/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1796/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1797/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1798/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1799/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1800/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1801/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1802/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1803/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1804/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1805/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1806/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1807/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1808/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1809/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1810/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1811/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1812/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1813/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1814/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1815/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1816/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1817/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1818/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1819/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1820/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1821/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1822/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1823/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1824/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1825/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1826/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1827/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1828/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1829/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1830/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1831/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1832/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1833/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1834/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1835/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1836/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1837/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1838/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1839/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1840/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1841/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1842/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1843/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1844/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1845/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1846/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1847/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1848/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1849/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1850/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1851/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1852/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1853/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1854/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1855/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1856/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1857/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1858/2000
279/279 [==============================] - 0s 97us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1859/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1860/2000
279/279 [==============================] - 0s 118us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1861/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1862/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1863/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1864/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1865/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1866/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1867/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1868/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1869/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1870/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1871/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1872/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1873/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1874/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1875/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1876/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1877/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1878/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1879/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1880/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1881/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1882/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1883/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1884/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1885/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1886/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1887/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1888/2000
279/279 [==============================] - 0s 115us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1889/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1890/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1891/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1892/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1893/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1894/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1895/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1896/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1897/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1898/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1899/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1900/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1901/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1902/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1903/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1904/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1905/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1906/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1907/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1908/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1909/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1910/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1911/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1912/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1913/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1914/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1915/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1916/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1917/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1918/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1919/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1920/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1921/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1922/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1923/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1924/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1925/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1926/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1927/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1928/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1929/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1930/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1931/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1932/2000
279/279 [==============================] - 0s 100us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1933/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1934/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1935/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1936/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1937/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1938/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1939/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1940/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1941/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1942/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1943/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1944/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1945/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1946/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1947/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1948/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1949/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1950/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1951/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1952/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1953/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1954/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1955/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1956/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1957/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1958/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1959/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1960/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1961/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1962/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1963/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1964/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1965/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1966/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1967/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1968/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1969/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1970/2000
279/279 [==============================] - 0s 129us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1971/2000
279/279 [==============================] - 0s 125us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1972/2000
279/279 [==============================] - 0s 107us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1973/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1974/2000
279/279 [==============================] - 0s 104us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1975/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1976/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1977/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1978/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1979/2000
279/279 [==============================] - 0s 75us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1980/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1981/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1982/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1983/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1984/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1985/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1986/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1987/2000
279/279 [==============================] - 0s 90us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1988/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1989/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1990/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1991/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1992/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1993/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1994/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1995/2000
279/279 [==============================] - 0s 82us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1996/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1997/2000
279/279 [==============================] - 0s 79us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1998/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 1999/2000
279/279 [==============================] - 0s 86us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
Epoch 2000/2000
279/279 [==============================] - 0s 93us/step - loss: 0.0222 - accuracy: 1.0000 - val_loss: 1.4058 - val_accuracy: 0.6989
In [323]:
# Plot training-vs-validation accuracy and loss curves from the Keras History
# object produced by model.fit above. (Removed a leftover debug print of the
# bare `epochs` range; added axis labels so each figure stands alone.)
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 2000)
In [324]:
# Score the trained network on the held-out test set and report both metrics.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
93/93 [==============================] - 0s 43us/step
test loss: 1.405830939610799, test accuracy: 0.698924720287323
In [325]:
# Threshold the sigmoid outputs at 0.5 to get hard 0/1 predictions, then
# report Cohen's kappa and ROC AUC against the true labels.
# Vectorized instead of int(i>=0.5) per row: converting a size-1 ndarray to a
# Python int is deprecated in NumPy and the map form breaks for 2-D output.
y_pred = (model.predict(X_test) >= 0.5).astype(int).ravel()
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
Kappa:  0.1712285168682367
AUC ROC:  0.5791176470588235

KMeans

In [216]:
X
Out[216]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6
0 1.574572 0.885785 -0.744040 1.222732 -1.020937 0.253925
1 1.110821 0.925768 0.226451 -0.119687 0.143401 -0.009902
2 -0.106107 1.808295 1.961049 1.107464 2.076449 1.966210
3 0.083078 1.385239 1.506771 1.294360 1.104665 2.105667
4 -0.164331 0.169248 0.525026 1.442347 1.710639 0.657093
5 -0.601767 0.100025 2.894764 -1.234721 1.277722 2.254758
6 -0.436923 -0.300019 1.146480 0.730610 -1.634206 -0.621955
7 -0.587280 1.210009 0.829948 0.235398 -0.830262 0.277419
8 -0.072320 0.439239 -0.084262 0.666161 -0.979709 0.134482
9 0.177494 0.386052 0.266785 -1.461050 1.702079 -0.627335
10 0.473878 0.893926 -0.138418 -0.267275 -0.407548 -0.229186
11 0.245249 0.482974 0.995106 0.679754 0.235560 0.480101
12 0.011981 -0.373717 -0.589054 0.487517 -1.428960 0.073724
13 0.190041 -0.273603 0.483229 0.925167 -1.268062 -0.057357
14 -1.064192 -0.043564 -1.313412 -1.204309 1.571772 -1.751836
15 -0.197842 0.773898 0.917595 -0.533388 1.883323 -0.803595
16 -0.610344 0.615674 0.638901 -1.525221 1.272377 -1.132221
17 0.804488 -1.479012 0.374228 0.166272 -1.743433 -1.002346
18 0.490692 -1.982800 -0.516405 -0.202546 -1.666137 -1.170162
19 0.592053 -1.492906 0.420008 -0.901877 -2.327543 -0.070308
20 -1.787738 1.285484 -0.785859 -2.380832 -1.022434 -0.395605
21 -1.742165 -0.033766 -0.693835 -0.641834 0.381590 -1.954523
22 -1.729041 0.173705 -1.150118 -0.130491 -1.173120 -1.443805
23 0.437142 1.722799 -2.129021 -2.481456 -0.156650 0.254809
24 1.350380 0.970678 0.076009 -0.404025 -1.384857 0.117089
25 0.496482 -0.133100 -0.887460 0.472889 -1.490365 1.615562
26 0.364827 1.228853 -0.931602 -0.240277 -0.555015 1.259771
27 1.022426 1.569202 -1.345165 -1.077121 -0.192695 0.678057
28 0.458228 1.620487 -0.211045 -1.256812 0.846741 -0.038512
29 0.607951 1.683390 -0.591685 -0.243413 -0.937265 2.063508
... ... ... ... ... ... ...
342 0.736233 -1.887137 0.319724 -0.489954 -0.346230 -0.577137
343 2.234360 -0.046651 1.657368 -1.085388 0.721768 -1.665035
344 0.911113 0.039319 -1.297534 0.592375 -0.241987 2.261413
345 1.384636 -0.476054 -1.183101 0.367974 0.257066 1.378080
346 0.961181 0.071015 -0.810140 0.868325 0.332780 1.588246
347 0.937051 -1.450311 -0.319702 -0.228450 0.739750 -0.449282
348 -3.281761 0.179415 -1.446642 -0.685115 -0.600794 0.658153
349 -2.344820 0.180118 0.106773 0.037180 -0.877178 1.123398
350 1.460135 0.906456 -0.147713 0.343900 -0.620485 0.700956
351 2.114961 0.965145 -1.378351 -0.574489 -1.693320 -0.016307
352 1.504768 0.846661 -1.860231 0.705179 -1.292241 0.593433
353 -0.213580 0.437840 0.427356 1.095762 -1.001309 -0.054969
354 -0.063245 0.024794 0.060996 0.129995 -0.599389 0.500376
355 0.116032 0.004823 0.235041 0.085659 -0.431386 0.814703
356 -0.330558 0.187426 -0.234587 0.942953 -0.434096 0.203910
357 0.380368 0.608654 -0.308912 1.154966 -0.255135 0.167498
358 0.893056 0.389669 -0.426997 0.638788 -0.700280 0.368667
359 -1.019517 -2.497618 0.166376 1.273368 0.313702 -0.420230
360 -0.784247 -1.148191 -0.802374 1.038236 -0.602589 -0.913446
361 0.139494 -2.036594 -1.137199 -0.379348 -0.257913 0.506162
362 1.494839 -2.022204 -0.164524 -2.180060 0.002713 1.550614
363 3.128156 -1.231830 0.035160 -2.785380 -0.610055 0.724620
364 2.010326 -1.482568 -1.697983 -2.728569 -0.765820 2.873139
365 1.499390 0.641291 -0.739018 -1.456660 -0.760400 -0.452027
366 1.398136 1.715250 -0.369182 -1.280480 -0.150680 -0.884280
367 1.103318 0.778728 -0.851121 -1.368219 0.142626 -0.918794
368 0.303168 0.188358 0.095953 -0.024506 -0.709672 -1.109607
369 1.183673 0.747660 -0.209307 0.329011 -1.151082 -0.726250
370 -0.723654 -0.290377 1.173636 -0.123624 1.997744 -0.687810
371 -0.073840 1.011128 0.445136 0.821330 -0.338478 -0.694080

372 rows × 6 columns

In [217]:
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14,
# each fit with a fixed seed for reproducibility. Last line displays the list.
WSSs = [
    KMeans(n_clusters=k, random_state=0).fit(X).inertia_
    for k in range(1, 15)
]
WSSs
Out[217]:
[2232.0,
 1860.320016741722,
 1609.923817414493,
 1413.3293251834796,
 1299.2594621943642,
 1187.9595776711437,
 1107.911820622218,
 1042.5800837127117,
 995.2457858732439,
 962.2198115266042,
 937.080734625301,
 898.72618782214,
 866.9993526105154,
 833.7799456003584]
In [218]:
# Visualize the elbow curve to pick k (labels added so the figure stands alone).
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.title('Elbow curve for KMeans')
Out[218]:
[<matplotlib.lines.Line2D at 0x1e82f9e3828>]

K=4

In [219]:
# Fit KMeans with k = 4 (chosen from the elbow curve above), fixed seed.
# fit() returns the estimator, so displaying it shows the fitted parameters.
kmeans_tc = KMeans(n_clusters=4, random_state=0, n_init=10).fit(X)
kmeans_tc
Out[219]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=4, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [220]:
kmeans_tc.labels_
Out[220]:
array([2, 2, 0, 0, 0, 0, 1, 0, 0, 3, 2, 0, 1, 1, 3, 3, 3, 1, 1, 1, 3, 3,
       1, 3, 2, 2, 2, 2, 3, 2, 0, 0, 3, 3, 1, 1, 1, 1, 1, 1, 0, 0, 0, 3,
       3, 3, 2, 2, 2, 2, 2, 0, 1, 1, 0, 0, 0, 2, 2, 2, 3, 1, 3, 1, 3, 3,
       0, 0, 0, 3, 3, 3, 3, 2, 0, 2, 2, 0, 0, 0, 0, 0, 0, 2, 3, 3, 0, 0,
       2, 1, 0, 3, 2, 1, 1, 2, 0, 3, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0,
       3, 0, 0, 0, 3, 0, 2, 2, 2, 0, 3, 2, 0, 3, 2, 3, 1, 1, 1, 0, 2, 0,
       3, 3, 3, 3, 1, 2, 0, 1, 0, 1, 1, 2, 1, 0, 1, 1, 0, 0, 0, 3, 0, 2,
       0, 0, 0, 1, 1, 2, 0, 0, 0, 1, 1, 1, 2, 2, 1, 3, 2, 3, 0, 1, 1, 2,
       0, 1, 1, 1, 1, 2, 1, 2, 2, 3, 3, 0, 0, 1, 1, 0, 2, 2, 2, 2, 2, 1,
       2, 1, 1, 3, 0, 1, 2, 3, 2, 2, 3, 2, 0, 2, 2, 1, 0, 1, 0, 1, 2, 1,
       2, 3, 3, 3, 2, 2, 2, 1, 2, 1, 1, 2, 1, 2, 2, 3, 2, 3, 3, 3, 3, 3,
       3, 3, 3, 1, 0, 0, 2, 2, 2, 1, 2, 2, 3, 3, 3, 1, 2, 2, 3, 3, 3, 3,
       2, 1, 1, 1, 3, 3, 1, 1, 1, 2, 2, 1, 3, 2, 0, 2, 2, 3, 3, 3, 3, 1,
       0, 2, 2, 3, 2, 0, 3, 1, 3, 2, 1, 0, 0, 3, 0, 0, 0, 2, 0, 1, 1, 0,
       0, 0, 3, 0, 0, 1, 1, 3, 1, 1, 3, 0, 2, 2, 1, 1, 1, 1, 1, 1, 2, 0,
       0, 3, 0, 0, 0, 2, 2, 3, 2, 1, 1, 1, 1, 3, 2, 2, 2, 1, 0, 0, 2, 2,
       2, 0, 0, 0, 0, 2, 2, 1, 1, 1, 2, 2, 2, 2, 3, 3, 1, 2, 3, 0])
In [221]:
# Cluster assignment per track. Since X is the same data KMeans was fitted on,
# predict(X) matches kmeans_tc.labels_ (Out[220] and Out[221] are identical).
clusters_tc = kmeans_tc.predict(X)
clusters_tc
Out[221]:
array([2, 2, 0, 0, 0, 0, 1, 0, 0, 3, 2, 0, 1, 1, 3, 3, 3, 1, 1, 1, 3, 3,
       1, 3, 2, 2, 2, 2, 3, 2, 0, 0, 3, 3, 1, 1, 1, 1, 1, 1, 0, 0, 0, 3,
       3, 3, 2, 2, 2, 2, 2, 0, 1, 1, 0, 0, 0, 2, 2, 2, 3, 1, 3, 1, 3, 3,
       0, 0, 0, 3, 3, 3, 3, 2, 0, 2, 2, 0, 0, 0, 0, 0, 0, 2, 3, 3, 0, 0,
       2, 1, 0, 3, 2, 1, 1, 2, 0, 3, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0,
       3, 0, 0, 0, 3, 0, 2, 2, 2, 0, 3, 2, 0, 3, 2, 3, 1, 1, 1, 0, 2, 0,
       3, 3, 3, 3, 1, 2, 0, 1, 0, 1, 1, 2, 1, 0, 1, 1, 0, 0, 0, 3, 0, 2,
       0, 0, 0, 1, 1, 2, 0, 0, 0, 1, 1, 1, 2, 2, 1, 3, 2, 3, 0, 1, 1, 2,
       0, 1, 1, 1, 1, 2, 1, 2, 2, 3, 3, 0, 0, 1, 1, 0, 2, 2, 2, 2, 2, 1,
       2, 1, 1, 3, 0, 1, 2, 3, 2, 2, 3, 2, 0, 2, 2, 1, 0, 1, 0, 1, 2, 1,
       2, 3, 3, 3, 2, 2, 2, 1, 2, 1, 1, 2, 1, 2, 2, 3, 2, 3, 3, 3, 3, 3,
       3, 3, 3, 1, 0, 0, 2, 2, 2, 1, 2, 2, 3, 3, 3, 1, 2, 2, 3, 3, 3, 3,
       2, 1, 1, 1, 3, 3, 1, 1, 1, 2, 2, 1, 3, 2, 0, 2, 2, 3, 3, 3, 3, 1,
       0, 2, 2, 3, 2, 0, 3, 1, 3, 2, 1, 0, 0, 3, 0, 0, 0, 2, 0, 1, 1, 0,
       0, 0, 3, 0, 0, 1, 1, 3, 1, 1, 3, 0, 2, 2, 1, 1, 1, 1, 1, 1, 2, 0,
       0, 3, 0, 0, 0, 2, 2, 3, 2, 1, 1, 1, 1, 3, 2, 2, 2, 1, 0, 0, 2, 2,
       2, 0, 0, 0, 0, 2, 2, 1, 1, 1, 2, 2, 2, 2, 3, 3, 1, 2, 3, 0])
In [222]:
# Attach the cluster assignment and the ground-truth 'chosen' label to X so
# they can be cross-tabulated below.
# NOTE(review): this mutates X in place — after this cell X is no longer a
# pure feature matrix.
X['Cluster'] = clusters_tc
X['chosen'] = list(y)
In [223]:
X
Out[223]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6 Cluster chosen
0 1.574572 0.885785 -0.744040 1.222732 -1.020937 0.253925 2 0
1 1.110821 0.925768 0.226451 -0.119687 0.143401 -0.009902 2 0
2 -0.106107 1.808295 1.961049 1.107464 2.076449 1.966210 0 0
3 0.083078 1.385239 1.506771 1.294360 1.104665 2.105667 0 0
4 -0.164331 0.169248 0.525026 1.442347 1.710639 0.657093 0 0
5 -0.601767 0.100025 2.894764 -1.234721 1.277722 2.254758 0 0
6 -0.436923 -0.300019 1.146480 0.730610 -1.634206 -0.621955 1 0
7 -0.587280 1.210009 0.829948 0.235398 -0.830262 0.277419 0 0
8 -0.072320 0.439239 -0.084262 0.666161 -0.979709 0.134482 0 0
9 0.177494 0.386052 0.266785 -1.461050 1.702079 -0.627335 3 0
10 0.473878 0.893926 -0.138418 -0.267275 -0.407548 -0.229186 2 0
11 0.245249 0.482974 0.995106 0.679754 0.235560 0.480101 0 0
12 0.011981 -0.373717 -0.589054 0.487517 -1.428960 0.073724 1 0
13 0.190041 -0.273603 0.483229 0.925167 -1.268062 -0.057357 1 0
14 -1.064192 -0.043564 -1.313412 -1.204309 1.571772 -1.751836 3 0
15 -0.197842 0.773898 0.917595 -0.533388 1.883323 -0.803595 3 0
16 -0.610344 0.615674 0.638901 -1.525221 1.272377 -1.132221 3 0
17 0.804488 -1.479012 0.374228 0.166272 -1.743433 -1.002346 1 0
18 0.490692 -1.982800 -0.516405 -0.202546 -1.666137 -1.170162 1 0
19 0.592053 -1.492906 0.420008 -0.901877 -2.327543 -0.070308 1 0
20 -1.787738 1.285484 -0.785859 -2.380832 -1.022434 -0.395605 3 0
21 -1.742165 -0.033766 -0.693835 -0.641834 0.381590 -1.954523 3 0
22 -1.729041 0.173705 -1.150118 -0.130491 -1.173120 -1.443805 1 0
23 0.437142 1.722799 -2.129021 -2.481456 -0.156650 0.254809 3 0
24 1.350380 0.970678 0.076009 -0.404025 -1.384857 0.117089 2 0
25 0.496482 -0.133100 -0.887460 0.472889 -1.490365 1.615562 2 0
26 0.364827 1.228853 -0.931602 -0.240277 -0.555015 1.259771 2 0
27 1.022426 1.569202 -1.345165 -1.077121 -0.192695 0.678057 2 0
28 0.458228 1.620487 -0.211045 -1.256812 0.846741 -0.038512 3 0
29 0.607951 1.683390 -0.591685 -0.243413 -0.937265 2.063508 2 0
... ... ... ... ... ... ... ... ...
342 0.736233 -1.887137 0.319724 -0.489954 -0.346230 -0.577137 1 1
343 2.234360 -0.046651 1.657368 -1.085388 0.721768 -1.665035 3 1
344 0.911113 0.039319 -1.297534 0.592375 -0.241987 2.261413 2 1
345 1.384636 -0.476054 -1.183101 0.367974 0.257066 1.378080 2 1
346 0.961181 0.071015 -0.810140 0.868325 0.332780 1.588246 2 1
347 0.937051 -1.450311 -0.319702 -0.228450 0.739750 -0.449282 1 1
348 -3.281761 0.179415 -1.446642 -0.685115 -0.600794 0.658153 0 1
349 -2.344820 0.180118 0.106773 0.037180 -0.877178 1.123398 0 1
350 1.460135 0.906456 -0.147713 0.343900 -0.620485 0.700956 2 1
351 2.114961 0.965145 -1.378351 -0.574489 -1.693320 -0.016307 2 1
352 1.504768 0.846661 -1.860231 0.705179 -1.292241 0.593433 2 1
353 -0.213580 0.437840 0.427356 1.095762 -1.001309 -0.054969 0 1
354 -0.063245 0.024794 0.060996 0.129995 -0.599389 0.500376 0 1
355 0.116032 0.004823 0.235041 0.085659 -0.431386 0.814703 0 1
356 -0.330558 0.187426 -0.234587 0.942953 -0.434096 0.203910 0 1
357 0.380368 0.608654 -0.308912 1.154966 -0.255135 0.167498 2 1
358 0.893056 0.389669 -0.426997 0.638788 -0.700280 0.368667 2 1
359 -1.019517 -2.497618 0.166376 1.273368 0.313702 -0.420230 1 1
360 -0.784247 -1.148191 -0.802374 1.038236 -0.602589 -0.913446 1 1
361 0.139494 -2.036594 -1.137199 -0.379348 -0.257913 0.506162 1 1
362 1.494839 -2.022204 -0.164524 -2.180060 0.002713 1.550614 2 1
363 3.128156 -1.231830 0.035160 -2.785380 -0.610055 0.724620 2 1
364 2.010326 -1.482568 -1.697983 -2.728569 -0.765820 2.873139 2 1
365 1.499390 0.641291 -0.739018 -1.456660 -0.760400 -0.452027 2 1
366 1.398136 1.715250 -0.369182 -1.280480 -0.150680 -0.884280 3 1
367 1.103318 0.778728 -0.851121 -1.368219 0.142626 -0.918794 3 1
368 0.303168 0.188358 0.095953 -0.024506 -0.709672 -1.109607 1 1
369 1.183673 0.747660 -0.209307 0.329011 -1.151082 -0.726250 2 1
370 -0.723654 -0.290377 1.173636 -0.123624 1.997744 -0.687810 3 1
371 -0.073840 1.011128 0.445136 0.821330 -0.338478 -0.694080 0 1

372 rows × 8 columns

In [224]:
# Cross-tabulate cluster membership against the 'chosen' label and draw one
# stacked bar per cluster (counts of chosen == 0 vs chosen == 1).
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = stacked.set_index(['Cluster', 'chosen'])[0].unstack('chosen')
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[224]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e82fa1f9e8>
In [326]:
# Render a markdown section header with the current company's name
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[1]))

Club De Banqueros y Empresarios

ANN

In [327]:
X = df_n_ps_std_tc[1]
In [328]:
y = df_n_ps[1]['chosen']
In [329]:
X_train, X_test, y_train, y_test = train_test_split(X, y)
In [330]:
X_train.shape
Out[330]:
(191, 6)
In [229]:
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [230]:
# Candidate values for the MLP grid search: activation function, training
# epochs, architecture (1-3 hidden layers), initial learning rate, batch size.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = (
    [(width,) for width in (10, 20, 30)]            # single hidden layer
    + [(10, 10), (20, 20), (30, 30), (20, 10)]      # two hidden layers
    + [(10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]  # three
)
learning_rate_init_vec = [k / 1000 for k in range(1, 11)] + [0.02]
batch_size_vec = [10, 20] + list(range(40, 101, 20)) + [150]
In [231]:
import time
start = time.time()  # wall-clock reference point to time the grid search

np.random.seed(1234)  # fix the RNG so the search is reproducible
# Parameter grid; batch_size is deliberately left out (kept commented) to
# limit the search space.
param_grid = {'activation': activation_vec,
              'max_iter': max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track kappa alongside accuracy; the refit (best model) is chosen by accuracy.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
grid = GridSearchCV(mlp, param_grid=param_grid, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [232]:
# Run the cross-validated search, then report the winning configuration with
# its accuracy and kappa, plus the total wall-clock time.
grid.fit(X_train, y_train)

best_kappa = grid.cv_results_['mean_test_kappa'][grid.best_index_]
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, best_kappa*100))
end = time.time()  # timestamp after the search finishes
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'logistic', 'hidden_layer_sizes': (10,), 'learning_rate_init': 0.004, 'max_iter': 400}, que permiten obtener un Accuracy de 77.49% y un Kappa del 29.13
Tiempo total: 27.26 minutos
In [331]:
n0 = X_train.shape[1]  # input dimension (number of features)
# Manual override: use a single hidden layer of 10 units, matching the
# grid-search winner reported above.
grid.best_params_['hidden_layer_sizes'] = [10]
### hidden_layer_sizes
# Layer sizes for the Keras model: the hidden layer widths plus one output
# unit. (Replaced the index-append loop with the list() idiom.)
ns = list(grid.best_params_['hidden_layer_sizes'])

ns.append(1)
lr = 0.004      # initial learning rate, from the grid-search best params
epochs = 400    # training epochs, from the grid-search best params
In [332]:
input_tensor = Input(shape = (n0,))
In [333]:
# Stack the hidden Dense layers on top of the input tensor, keeping every
# intermediate tensor in hidden_outputs; the final 1-unit sigmoid layer is the
# binary classification head.
# NOTE(review): the grid search picked activation='logistic', but the hidden
# layers here use 'tanh' — confirm the mismatch is intentional.
hidden_outputs = [input_tensor]
for units in ns[:-1]:
    hidden_outputs.append(Dense(units, activation = 'tanh')(hidden_outputs[-1]))

classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [334]:
# Assemble the functional model and snapshot its freshly-initialized weights;
# they are restored via model.set_weights(weights) just before training below,
# so repeated fits start from the same initialization.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [335]:
model.summary()
Model: "model_16"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_16 (InputLayer)        (None, 6)                 0         
_________________________________________________________________
dense_53 (Dense)             (None, 10)                70        
_________________________________________________________________
dense_54 (Dense)             (None, 1)                 11        
=================================================================
Total params: 81
Trainable params: 81
Non-trainable params: 0
_________________________________________________________________
In [336]:
# Restore the initial weights so training starts from the snapshot taken
# above, then compile and train, halving the learning rate whenever
# validation accuracy plateaus for 10 epochs.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
reduce_lr = keras.callbacks.ReduceLROnPlateau(
    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
)
history = model.fit(
    X_train, y_train,
    epochs=epochs,
    validation_data=(X_test, y_test),
    callbacks=[reduce_lr],
)
Train on 191 samples, validate on 64 samples
Epoch 1/400
191/191 [==============================] - 0s 691us/step - loss: 0.7329 - accuracy: 0.4346 - val_loss: 0.6526 - val_accuracy: 0.6250
Epoch 2/400
191/191 [==============================] - 0s 63us/step - loss: 0.6918 - accuracy: 0.5340 - val_loss: 0.6515 - val_accuracy: 0.6250
Epoch 3/400
191/191 [==============================] - 0s 52us/step - loss: 0.6588 - accuracy: 0.6754 - val_loss: 0.6536 - val_accuracy: 0.6406
Epoch 4/400
191/191 [==============================] - 0s 58us/step - loss: 0.6310 - accuracy: 0.7225 - val_loss: 0.6568 - val_accuracy: 0.6406
Epoch 5/400
191/191 [==============================] - 0s 52us/step - loss: 0.6102 - accuracy: 0.7277 - val_loss: 0.6612 - val_accuracy: 0.6094
Epoch 6/400
191/191 [==============================] - 0s 58us/step - loss: 0.5927 - accuracy: 0.7435 - val_loss: 0.6655 - val_accuracy: 0.6094
Epoch 7/400
191/191 [==============================] - 0s 52us/step - loss: 0.5802 - accuracy: 0.7330 - val_loss: 0.6705 - val_accuracy: 0.6250
Epoch 8/400
191/191 [==============================] - 0s 63us/step - loss: 0.5688 - accuracy: 0.7225 - val_loss: 0.6735 - val_accuracy: 0.6406
Epoch 9/400
191/191 [==============================] - 0s 58us/step - loss: 0.5619 - accuracy: 0.7277 - val_loss: 0.6773 - val_accuracy: 0.6406
Epoch 10/400
191/191 [==============================] - 0s 52us/step - loss: 0.5544 - accuracy: 0.7225 - val_loss: 0.6793 - val_accuracy: 0.6406
Epoch 11/400
191/191 [==============================] - 0s 52us/step - loss: 0.5482 - accuracy: 0.7330 - val_loss: 0.6820 - val_accuracy: 0.6406
Epoch 12/400
191/191 [==============================] - 0s 52us/step - loss: 0.5439 - accuracy: 0.7382 - val_loss: 0.6808 - val_accuracy: 0.6406
Epoch 13/400
191/191 [==============================] - 0s 47us/step - loss: 0.5396 - accuracy: 0.7487 - val_loss: 0.6820 - val_accuracy: 0.6719
Epoch 14/400
191/191 [==============================] - 0s 52us/step - loss: 0.5368 - accuracy: 0.7487 - val_loss: 0.6822 - val_accuracy: 0.6875
Epoch 15/400
191/191 [==============================] - 0s 52us/step - loss: 0.5339 - accuracy: 0.7487 - val_loss: 0.6824 - val_accuracy: 0.6875
Epoch 16/400
191/191 [==============================] - 0s 52us/step - loss: 0.5321 - accuracy: 0.7487 - val_loss: 0.6854 - val_accuracy: 0.6875
Epoch 17/400
191/191 [==============================] - 0s 58us/step - loss: 0.5302 - accuracy: 0.7487 - val_loss: 0.6852 - val_accuracy: 0.7031
Epoch 18/400
191/191 [==============================] - 0s 63us/step - loss: 0.5283 - accuracy: 0.7592 - val_loss: 0.6866 - val_accuracy: 0.6875
Epoch 19/400
191/191 [==============================] - 0s 52us/step - loss: 0.5269 - accuracy: 0.7644 - val_loss: 0.6868 - val_accuracy: 0.6875
Epoch 20/400
191/191 [==============================] - 0s 58us/step - loss: 0.5255 - accuracy: 0.7696 - val_loss: 0.6862 - val_accuracy: 0.6875
Epoch 21/400
191/191 [==============================] - 0s 52us/step - loss: 0.5243 - accuracy: 0.7696 - val_loss: 0.6876 - val_accuracy: 0.7031
Epoch 22/400
191/191 [==============================] - 0s 52us/step - loss: 0.5230 - accuracy: 0.7592 - val_loss: 0.6868 - val_accuracy: 0.7031
Epoch 23/400
191/191 [==============================] - 0s 58us/step - loss: 0.5218 - accuracy: 0.7592 - val_loss: 0.6851 - val_accuracy: 0.6875
Epoch 24/400
191/191 [==============================] - 0s 52us/step - loss: 0.5210 - accuracy: 0.7592 - val_loss: 0.6850 - val_accuracy: 0.6875
Epoch 25/400
191/191 [==============================] - 0s 58us/step - loss: 0.5197 - accuracy: 0.7592 - val_loss: 0.6881 - val_accuracy: 0.6875
Epoch 26/400
191/191 [==============================] - 0s 58us/step - loss: 0.5187 - accuracy: 0.7592 - val_loss: 0.6851 - val_accuracy: 0.6719
Epoch 27/400
191/191 [==============================] - 0s 52us/step - loss: 0.5172 - accuracy: 0.7539 - val_loss: 0.6843 - val_accuracy: 0.6875

Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.0020000000949949026.
Epoch 28/400
191/191 [==============================] - 0s 52us/step - loss: 0.5162 - accuracy: 0.7644 - val_loss: 0.6851 - val_accuracy: 0.6875
Epoch 29/400
191/191 [==============================] - 0s 68us/step - loss: 0.5159 - accuracy: 0.7644 - val_loss: 0.6847 - val_accuracy: 0.6875
Epoch 30/400
191/191 [==============================] - 0s 78us/step - loss: 0.5152 - accuracy: 0.7539 - val_loss: 0.6848 - val_accuracy: 0.6875
Epoch 31/400
191/191 [==============================] - 0s 63us/step - loss: 0.5147 - accuracy: 0.7539 - val_loss: 0.6859 - val_accuracy: 0.6875
Epoch 32/400
191/191 [==============================] - 0s 84us/step - loss: 0.5142 - accuracy: 0.7487 - val_loss: 0.6866 - val_accuracy: 0.6875
Epoch 33/400
191/191 [==============================] - 0s 63us/step - loss: 0.5136 - accuracy: 0.7487 - val_loss: 0.6859 - val_accuracy: 0.6875
Epoch 34/400
191/191 [==============================] - 0s 89us/step - loss: 0.5130 - accuracy: 0.7487 - val_loss: 0.6860 - val_accuracy: 0.6875
Epoch 35/400
191/191 [==============================] - 0s 89us/step - loss: 0.5125 - accuracy: 0.7487 - val_loss: 0.6856 - val_accuracy: 0.6875
Epoch 36/400
191/191 [==============================] - 0s 78us/step - loss: 0.5121 - accuracy: 0.7539 - val_loss: 0.6865 - val_accuracy: 0.6875
Epoch 37/400
191/191 [==============================] - 0s 68us/step - loss: 0.5115 - accuracy: 0.7539 - val_loss: 0.6854 - val_accuracy: 0.6719

Epoch 00037: ReduceLROnPlateau reducing learning rate to 0.0010000000474974513.
Epoch 38/400
191/191 [==============================] - 0s 58us/step - loss: 0.5108 - accuracy: 0.7539 - val_loss: 0.6854 - val_accuracy: 0.6719
Epoch 39/400
191/191 [==============================] - 0s 58us/step - loss: 0.5105 - accuracy: 0.7487 - val_loss: 0.6852 - val_accuracy: 0.6719
Epoch 40/400
191/191 [==============================] - 0s 58us/step - loss: 0.5102 - accuracy: 0.7539 - val_loss: 0.6857 - val_accuracy: 0.6719
Epoch 41/400
191/191 [==============================] - 0s 58us/step - loss: 0.5099 - accuracy: 0.7539 - val_loss: 0.6859 - val_accuracy: 0.6719
Epoch 42/400
191/191 [==============================] - 0s 58us/step - loss: 0.5096 - accuracy: 0.7539 - val_loss: 0.6858 - val_accuracy: 0.6719
Epoch 43/400
191/191 [==============================] - 0s 58us/step - loss: 0.5095 - accuracy: 0.7487 - val_loss: 0.6846 - val_accuracy: 0.6719
Epoch 44/400
191/191 [==============================] - 0s 58us/step - loss: 0.5091 - accuracy: 0.7539 - val_loss: 0.6850 - val_accuracy: 0.6719
Epoch 45/400
191/191 [==============================] - 0s 63us/step - loss: 0.5089 - accuracy: 0.7539 - val_loss: 0.6851 - val_accuracy: 0.6719
Epoch 46/400
191/191 [==============================] - 0s 58us/step - loss: 0.5085 - accuracy: 0.7539 - val_loss: 0.6851 - val_accuracy: 0.6719
Epoch 47/400
191/191 [==============================] - 0s 63us/step - loss: 0.5083 - accuracy: 0.7539 - val_loss: 0.6848 - val_accuracy: 0.6719

Epoch 00047: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 48/400
191/191 [==============================] - 0s 58us/step - loss: 0.5079 - accuracy: 0.7487 - val_loss: 0.6850 - val_accuracy: 0.6719
Epoch 49/400
191/191 [==============================] - 0s 52us/step - loss: 0.5077 - accuracy: 0.7539 - val_loss: 0.6850 - val_accuracy: 0.6719
Epoch 50/400
191/191 [==============================] - 0s 58us/step - loss: 0.5076 - accuracy: 0.7539 - val_loss: 0.6852 - val_accuracy: 0.6719
Epoch 51/400
191/191 [==============================] - 0s 63us/step - loss: 0.5075 - accuracy: 0.7487 - val_loss: 0.6853 - val_accuracy: 0.6719
Epoch 52/400
191/191 [==============================] - 0s 58us/step - loss: 0.5073 - accuracy: 0.7487 - val_loss: 0.6853 - val_accuracy: 0.6719
Epoch 53/400
191/191 [==============================] - 0s 58us/step - loss: 0.5072 - accuracy: 0.7487 - val_loss: 0.6851 - val_accuracy: 0.6719
Epoch 54/400
191/191 [==============================] - 0s 52us/step - loss: 0.5070 - accuracy: 0.7487 - val_loss: 0.6850 - val_accuracy: 0.6719
Epoch 55/400
191/191 [==============================] - 0s 58us/step - loss: 0.5069 - accuracy: 0.7487 - val_loss: 0.6851 - val_accuracy: 0.6719
Epoch 56/400
191/191 [==============================] - 0s 58us/step - loss: 0.5067 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 57/400
191/191 [==============================] - 0s 52us/step - loss: 0.5066 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719

Epoch 00057: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 58/400
191/191 [==============================] - 0s 73us/step - loss: 0.5064 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 59/400
191/191 [==============================] - 0s 73us/step - loss: 0.5063 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 60/400
191/191 [==============================] - 0s 68us/step - loss: 0.5063 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 61/400
191/191 [==============================] - 0s 58us/step - loss: 0.5062 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 62/400
191/191 [==============================] - 0s 89us/step - loss: 0.5061 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 63/400
191/191 [==============================] - 0s 110us/step - loss: 0.5060 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 64/400
191/191 [==============================] - 0s 84us/step - loss: 0.5060 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 65/400
191/191 [==============================] - 0s 78us/step - loss: 0.5059 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 66/400
191/191 [==============================] - 0s 73us/step - loss: 0.5058 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 67/400
191/191 [==============================] - 0s 73us/step - loss: 0.5058 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00067: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 68/400
191/191 [==============================] - 0s 68us/step - loss: 0.5057 - accuracy: 0.7487 - val_loss: 0.6846 - val_accuracy: 0.6719
Epoch 69/400
191/191 [==============================] - 0s 63us/step - loss: 0.5056 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 70/400
191/191 [==============================] - 0s 68us/step - loss: 0.5056 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 71/400
191/191 [==============================] - 0s 79us/step - loss: 0.5056 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 72/400
191/191 [==============================] - 0s 68us/step - loss: 0.5055 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 73/400
191/191 [==============================] - 0s 78us/step - loss: 0.5055 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 74/400
191/191 [==============================] - 0s 89us/step - loss: 0.5055 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 75/400
191/191 [==============================] - 0s 68us/step - loss: 0.5054 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 76/400
191/191 [==============================] - 0s 63us/step - loss: 0.5054 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 77/400
191/191 [==============================] - 0s 63us/step - loss: 0.5053 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719

Epoch 00077: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 78/400
191/191 [==============================] - 0s 58us/step - loss: 0.5053 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 79/400
191/191 [==============================] - 0s 52us/step - loss: 0.5053 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 80/400
191/191 [==============================] - 0s 68us/step - loss: 0.5053 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 81/400
191/191 [==============================] - 0s 68us/step - loss: 0.5052 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 82/400
191/191 [==============================] - 0s 58us/step - loss: 0.5052 - accuracy: 0.7487 - val_loss: 0.6848 - val_accuracy: 0.6719
Epoch 83/400
191/191 [==============================] - 0s 63us/step - loss: 0.5052 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 84/400
191/191 [==============================] - 0s 58us/step - loss: 0.5052 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 85/400
191/191 [==============================] - 0s 63us/step - loss: 0.5052 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 86/400
191/191 [==============================] - 0s 58us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 87/400
191/191 [==============================] - 0s 52us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00087: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 88/400
191/191 [==============================] - 0s 52us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 89/400
191/191 [==============================] - 0s 58us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 90/400
191/191 [==============================] - 0s 58us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 91/400
191/191 [==============================] - 0s 52us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 92/400
191/191 [==============================] - 0s 58us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 93/400
191/191 [==============================] - 0s 63us/step - loss: 0.5051 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 94/400
191/191 [==============================] - 0s 58us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 95/400
191/191 [==============================] - 0s 52us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 96/400
191/191 [==============================] - 0s 58us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 97/400
191/191 [==============================] - 0s 110us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00097: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05.
Epoch 98/400
191/191 [==============================] - 0s 58us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 99/400
191/191 [==============================] - 0s 58us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 100/400
191/191 [==============================] - 0s 68us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 101/400
191/191 [==============================] - 0s 63us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 102/400
191/191 [==============================] - 0s 63us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 103/400
191/191 [==============================] - 0s 63us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 104/400
191/191 [==============================] - 0s 63us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 105/400
191/191 [==============================] - 0s 73us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 106/400
191/191 [==============================] - 0s 78us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 107/400
191/191 [==============================] - 0s 73us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00107: ReduceLROnPlateau reducing learning rate to 7.812500371073838e-06.
Epoch 108/400
191/191 [==============================] - 0s 78us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 109/400
191/191 [==============================] - 0s 63us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 110/400
191/191 [==============================] - 0s 52us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 111/400
191/191 [==============================] - 0s 84us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 112/400
191/191 [==============================] - 0s 63us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 113/400
191/191 [==============================] - 0s 63us/step - loss: 0.5050 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 114/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 115/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 116/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 117/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00117: ReduceLROnPlateau reducing learning rate to 3.906250185536919e-06.
Epoch 118/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 119/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 120/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 121/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 122/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 123/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 124/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 125/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 126/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 127/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00127: ReduceLROnPlateau reducing learning rate to 1.9531250927684596e-06.
Epoch 128/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 129/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 130/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 131/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 132/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 133/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 134/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 135/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 136/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 137/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00137: ReduceLROnPlateau reducing learning rate to 9.765625463842298e-07.
Epoch 138/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 139/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 140/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 141/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 142/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 143/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 144/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 145/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 146/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 147/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00147: ReduceLROnPlateau reducing learning rate to 4.882812731921149e-07.
Epoch 148/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 149/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 150/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 151/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 152/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 153/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 154/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 155/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 156/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 157/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00157: ReduceLROnPlateau reducing learning rate to 2.4414063659605745e-07.
Epoch 158/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 159/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 160/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 161/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 162/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 163/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 164/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 165/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 166/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 167/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00167: ReduceLROnPlateau reducing learning rate to 1.2207031829802872e-07.
Epoch 168/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 169/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 170/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 171/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 172/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 173/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 174/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 175/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 176/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 177/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00177: ReduceLROnPlateau reducing learning rate to 6.103515914901436e-08.
Epoch 178/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 179/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 180/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 181/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 182/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 183/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 184/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 185/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 186/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 187/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00187: ReduceLROnPlateau reducing learning rate to 3.051757957450718e-08.
Epoch 188/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 189/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 190/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 191/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 192/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 193/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 194/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 195/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 196/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 197/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00197: ReduceLROnPlateau reducing learning rate to 1.525878978725359e-08.
Epoch 198/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 199/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 200/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 201/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 202/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 203/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 204/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 205/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 206/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 207/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00207: ReduceLROnPlateau reducing learning rate to 7.629394893626795e-09.
Epoch 208/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 209/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 210/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 211/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 212/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 213/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 214/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 215/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 216/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 217/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00217: ReduceLROnPlateau reducing learning rate to 3.814697446813398e-09.
Epoch 218/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 219/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 220/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 221/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 222/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 223/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 224/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 225/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 226/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 227/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00227: ReduceLROnPlateau reducing learning rate to 1.907348723406699e-09.
Epoch 228/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 229/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 230/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 231/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 232/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 233/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 234/400
191/191 [==============================] - 0s 32us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 235/400
191/191 [==============================] - 0s 110us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 236/400
191/191 [==============================] - 0s 47us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 237/400
191/191 [==============================] - 0s 47us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00237: ReduceLROnPlateau reducing learning rate to 9.536743617033494e-10.
Epoch 238/400
191/191 [==============================] - 0s 37us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 239/400
191/191 [==============================] - 0s 82us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 240/400
191/191 [==============================] - 0s 0us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 241/400
191/191 [==============================] - 0s 137us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 242/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 243/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 244/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 245/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 246/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 247/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00247: ReduceLROnPlateau reducing learning rate to 4.768371808516747e-10.
Epoch 248/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 249/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 250/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 251/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 252/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 253/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 254/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 255/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 256/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 257/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00257: ReduceLROnPlateau reducing learning rate to 2.3841859042583735e-10.
Epoch 258/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 259/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 260/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 261/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 262/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 263/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 264/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 265/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 266/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 267/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00267: ReduceLROnPlateau reducing learning rate to 1.1920929521291868e-10.
Epoch 268/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 269/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 270/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 271/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 272/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 273/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 274/400
191/191 [==============================] - 0s 58us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 275/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 276/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 277/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00277: ReduceLROnPlateau reducing learning rate to 5.960464760645934e-11.
Epoch 278/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 279/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 280/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 281/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 282/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 283/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 284/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 285/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 286/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 287/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00287: ReduceLROnPlateau reducing learning rate to 2.980232380322967e-11.
Epoch 288/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 289/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 290/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 291/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 292/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 293/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 294/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 295/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 296/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 297/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00297: ReduceLROnPlateau reducing learning rate to 1.4901161901614834e-11.
Epoch 298/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 299/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 300/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 301/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 302/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 303/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 304/400
191/191 [==============================] - 0s 63us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 305/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 306/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 307/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00307: ReduceLROnPlateau reducing learning rate to 7.450580950807417e-12.
Epoch 308/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 309/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 310/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 311/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 312/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 313/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 314/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 315/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 316/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 317/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00317: ReduceLROnPlateau reducing learning rate to 3.725290475403709e-12.
Epoch 318/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 319/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 320/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 321/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 322/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 323/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 324/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 325/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 326/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 327/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00327: ReduceLROnPlateau reducing learning rate to 1.8626452377018543e-12.
Epoch 328/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 329/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 330/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 331/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 332/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 333/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 334/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 335/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 336/400
191/191 [==============================] - 0s 110us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 337/400
191/191 [==============================] - 0s 105us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00337: ReduceLROnPlateau reducing learning rate to 9.313226188509272e-13.
Epoch 338/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 339/400
191/191 [==============================] - 0s 126us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 340/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 341/400
191/191 [==============================] - 0s 105us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 342/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 343/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 344/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 345/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 346/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 347/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00347: ReduceLROnPlateau reducing learning rate to 4.656613094254636e-13.
Epoch 348/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 349/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 350/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 351/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 352/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 353/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 354/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 355/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 356/400
191/191 [==============================] - 0s 110us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 357/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00357: ReduceLROnPlateau reducing learning rate to 2.328306547127318e-13.
Epoch 358/400
191/191 [==============================] - 0s 115us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 359/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 360/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 361/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 362/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 363/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 364/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 365/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 366/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 367/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00367: ReduceLROnPlateau reducing learning rate to 1.164153273563659e-13.
Epoch 368/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 369/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 370/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 371/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 372/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 373/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 374/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 375/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 376/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 377/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00377: ReduceLROnPlateau reducing learning rate to 5.820766367818295e-14.
Epoch 378/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 379/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 380/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 381/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 382/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 383/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 384/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 385/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 386/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 387/400
191/191 [==============================] - 0s 94us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00387: ReduceLROnPlateau reducing learning rate to 2.9103831839091474e-14.
Epoch 388/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 389/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 390/400
191/191 [==============================] - 0s 84us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 391/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 392/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 393/400
191/191 [==============================] - 0s 99us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 394/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 395/400
191/191 [==============================] - 0s 89us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 396/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 397/400
191/191 [==============================] - 0s 68us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719

Epoch 00397: ReduceLROnPlateau reducing learning rate to 1.4551915919545737e-14.
Epoch 398/400
191/191 [==============================] - 0s 78us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 399/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
Epoch 400/400
191/191 [==============================] - 0s 73us/step - loss: 0.5049 - accuracy: 0.7487 - val_loss: 0.6847 - val_accuracy: 0.6719
In [337]:
# Plot training vs. validation curves from the Keras History object
# returned by model.fit (keys: accuracy / val_accuracy / loss / val_loss).
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x-value per epoch actually run.
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 400)
In [338]:
# Evaluate the trained network on the held-out test split and report
# the loss and accuracy.
test_loss, test_acc = model.evaluate(X_test, y_test)
msg = "test loss: {}, test accuracy: {}".format(test_loss, test_acc)
print(msg)
64/64 [==============================] - 0s 47us/step
test loss: 0.6846858859062195, test accuracy: 0.671875
In [339]:
# Score the raw predicted probabilities on the test set with ROC AUC.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.4842105263157894
In [340]:
# Binarize the predicted probabilities at the 0.5 threshold, then
# measure agreement with the true labels via Cohen's kappa.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  0.08943089430894313

KMeans clustering on the tonal centroid features

In [243]:
X
Out[243]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6
0 0.898091 0.151819 -1.172713 0.474387 -0.020230 1.228657
1 0.618513 -0.762588 0.061946 0.944076 0.697880 0.021150
2 0.685649 0.002933 0.719805 -1.251700 -0.952424 1.444556
3 1.175209 -0.552349 0.336427 0.482978 -0.212146 -0.144225
4 1.350337 -1.407757 0.258917 -0.523670 0.099306 1.706064
5 0.907564 -1.769301 1.177857 -0.869472 0.392594 0.385760
6 -0.071420 -0.800769 0.238726 1.318866 -1.075628 -0.545006
7 0.476433 -1.202140 -1.713665 0.379487 -0.347674 0.777899
8 0.572039 -1.488738 -0.403914 -1.066061 -0.818836 0.339231
9 0.741137 0.139987 0.726307 1.670135 -0.317435 -1.091941
10 0.533655 -0.111619 0.435253 1.832919 -0.556933 -1.014603
11 -0.667308 0.502566 -1.137726 -0.714521 -0.497571 0.123297
12 0.161812 0.294263 0.659166 -0.336211 1.410350 -0.272418
13 -0.373777 -1.439681 0.009190 0.731635 0.138615 0.850511
14 0.745550 0.214669 0.209787 0.424963 0.448908 -0.204578
15 0.320726 0.108060 0.208510 -1.138882 -0.874041 -1.779091
16 0.646392 -0.726119 0.153724 -0.203580 -1.017329 -1.068601
17 -0.042981 -0.672256 0.358250 -0.385808 -0.341018 -1.823744
18 0.822192 0.184879 1.658679 1.705929 3.070140 -1.218005
19 0.175070 0.195153 1.969940 0.005043 0.430538 -1.502715
20 1.339692 -1.202498 0.487937 -0.769520 -1.973308 -0.400699
21 1.290923 -0.546138 0.120024 0.429258 -0.165681 0.856938
22 1.528224 -0.912727 0.962682 -0.386673 -0.772181 -0.291766
23 -0.486779 -1.124424 0.559106 0.746533 -1.101240 1.082216
24 -0.230729 0.999926 -0.678209 -0.175670 1.412258 0.572372
25 -0.632681 0.618852 -0.778803 -0.808112 -0.442115 -0.146177
26 -1.151505 -1.127449 1.500641 -0.822825 0.158380 0.792656
27 0.265739 -3.078847 -0.939567 0.268673 -0.642098 -0.984495
28 0.623357 -1.241561 -1.149654 1.231993 2.023015 -0.070476
29 0.930863 -1.763587 -1.608926 0.462097 -0.677599 -0.693427
... ... ... ... ... ... ...
225 -1.444140 -0.088370 -0.458428 0.530251 -0.475625 -0.057486
226 -0.297006 0.887935 0.467148 2.000374 -0.396849 -0.846195
227 -1.624166 0.777486 0.635044 -1.376180 0.998008 -0.910882
228 0.230618 1.438780 0.301556 -1.353873 -0.586627 -0.102947
229 -0.163123 1.329205 0.721279 -1.383030 0.540446 -1.181571
230 -1.337576 0.249897 0.081067 0.886335 -0.078090 -0.344245
231 0.304553 0.584052 0.915910 2.455180 1.007231 0.268298
232 -0.291785 0.247731 -0.740382 0.896773 0.457951 0.390640
233 -0.532056 1.686101 0.358185 -1.561985 0.911246 0.638759
234 -1.223692 0.723005 0.599197 -0.955626 0.653814 0.112686
235 1.412552 -0.817418 0.038464 -2.397710 -2.903923 1.454325
236 0.141392 -0.756740 -1.981390 -0.636588 0.230786 0.968907
237 1.157567 -0.442417 -1.342532 -0.893118 -0.552517 -0.791388
238 -1.683225 -0.036571 0.297162 -1.488549 1.387872 -0.306946
239 -0.997159 0.655257 2.239993 -1.422875 0.373101 0.159004
240 -1.142741 0.931927 1.440876 0.665641 -0.994237 -1.093039
241 -0.151675 -0.971306 0.447819 0.895444 -0.863907 0.150120
242 -0.837654 -1.170592 0.622658 0.448216 -0.830715 -0.222067
243 -0.059101 -0.857751 0.253657 0.272951 -0.833270 0.160823
244 1.455210 -1.123798 1.124970 -1.841854 -0.183521 -0.193778
245 1.459407 -1.071308 -0.261053 -0.731205 0.603463 0.358072
246 1.850117 -1.364586 1.015519 -1.479941 -1.262489 -0.485304
247 0.468703 0.776904 -1.200084 -0.109459 0.572206 0.353229
248 0.758187 -0.030802 -1.190930 -0.092637 0.048267 2.174173
249 0.465492 -0.042081 0.541343 0.584645 0.066443 -1.886670
250 -1.114193 1.666162 0.201458 -1.543125 -0.123758 -0.430641
251 -1.675129 1.101864 0.721966 -1.964153 0.827116 0.134812
252 -1.371728 0.888874 -0.186673 -0.931346 0.795500 -1.063218
253 0.221249 0.272024 -1.593712 -0.242394 0.752955 1.102656
254 -0.747040 1.308435 0.858494 -1.950134 1.779312 -0.711789

255 rows × 6 columns

In [244]:
# Elbow analysis: record the KMeans within-cluster sum of squares
# (inertia_) for k = 1..14 on the feature matrix X.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[244]:
[1530.0000000000002,
 1266.8988304034983,
 1085.4171102625123,
 963.5827926636907,
 872.5239995069635,
 797.6140851961846,
 747.1323294070899,
 703.670300371115,
 664.3614627122823,
 637.5590430281768,
 607.7011770650902,
 585.4389967082509,
 558.8506960652073,
 540.5660329891642]
In [245]:
# Elbow plot: within-cluster sum of squares vs. number of clusters,
# used below to pick K for the final clustering.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow curve for KMeans')
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares (inertia)')
Out[245]:
[<matplotlib.lines.Line2D at 0x1e82fedf908>]

K = 3, chosen from the elbow in the WSS curve above

In [248]:
# Fit the final 3-cluster KMeans (fixed seed, 10 restarts) on X;
# the fitted estimator is the cell's displayed value.
kmeans_tc = KMeans(n_clusters=3, random_state=0, n_init=10).fit(X)
kmeans_tc
Out[248]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [249]:
kmeans_tc.labels_
Out[249]:
array([2, 0, 2, 0, 2, 0, 0, 2, 2, 0, 0, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 2,
       0, 0, 1, 1, 0, 0, 0, 2, 2, 0, 0, 1, 1, 0, 2, 2, 2, 2, 2, 0, 0, 0,
       0, 2, 2, 0, 1, 0, 0, 0, 0, 2, 2, 2, 2, 0, 0, 0, 0, 2, 2, 2, 2, 0,
       1, 1, 1, 1, 1, 0, 2, 0, 0, 2, 2, 2, 0, 0, 2, 2, 0, 2, 0, 1, 0, 1,
       1, 1, 2, 2, 2, 0, 0, 0, 2, 1, 2, 1, 0, 2, 0, 2, 2, 1, 1, 2, 1, 1,
       1, 0, 1, 2, 0, 2, 2, 1, 1, 1, 2, 2, 0, 2, 2, 2, 1, 0, 2, 1, 0, 0,
       2, 2, 2, 2, 1, 1, 1, 2, 0, 0, 0, 0, 1, 0, 2, 1, 2, 2, 2, 2, 1, 1,
       1, 1, 2, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 1, 2, 2,
       1, 2, 0, 1, 1, 2, 2, 2, 2, 0, 0, 2, 2, 1, 0, 2, 1, 1, 1, 2, 1, 2,
       2, 2, 1, 1, 2, 2, 1, 2, 0, 2, 0, 2, 2, 0, 0, 0, 0, 0, 2, 2, 0, 2,
       0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 2, 1, 1, 2, 2, 2, 1, 1, 1, 0,
       0, 0, 0, 2, 0, 2, 2, 0, 1, 1, 1, 2, 1])
In [250]:
# NOTE(review): predict(X) on the same X used for fit returns exactly
# kmeans_tc.labels_ (confirmed by the identical Out above) — this cell is
# redundant but harmless.
clusters_tc = kmeans_tc.predict(X)
clusters_tc
Out[250]:
array([2, 0, 2, 0, 2, 0, 0, 2, 2, 0, 0, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 2,
       0, 0, 1, 1, 0, 0, 0, 2, 2, 0, 0, 1, 1, 0, 2, 2, 2, 2, 2, 0, 0, 0,
       0, 2, 2, 0, 1, 0, 0, 0, 0, 2, 2, 2, 2, 0, 0, 0, 0, 2, 2, 2, 2, 0,
       1, 1, 1, 1, 1, 0, 2, 0, 0, 2, 2, 2, 0, 0, 2, 2, 0, 2, 0, 1, 0, 1,
       1, 1, 2, 2, 2, 0, 0, 0, 2, 1, 2, 1, 0, 2, 0, 2, 2, 1, 1, 2, 1, 1,
       1, 0, 1, 2, 0, 2, 2, 1, 1, 1, 2, 2, 0, 2, 2, 2, 1, 0, 2, 1, 0, 0,
       2, 2, 2, 2, 1, 1, 1, 2, 0, 0, 0, 0, 1, 0, 2, 1, 2, 2, 2, 2, 1, 1,
       1, 1, 2, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 1, 2, 2,
       1, 2, 0, 1, 1, 2, 2, 2, 2, 0, 0, 2, 2, 1, 0, 2, 1, 1, 1, 2, 1, 2,
       2, 2, 1, 1, 2, 2, 1, 2, 0, 2, 0, 2, 2, 0, 0, 0, 0, 0, 2, 2, 0, 2,
       0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 2, 1, 1, 2, 2, 2, 1, 1, 1, 0,
       0, 0, 0, 2, 0, 2, 2, 0, 1, 1, 1, 2, 1])
In [251]:
# Attach the cluster id and the target back onto X for the comparison below.
# .assign returns a new frame, avoiding the SettingWithCopyWarning that
# .loc[:, col] assignment can raise when X is derived from another frame.
X = X.assign(Cluster=clusters_tc, chosen=list(y))
In [252]:
# Display X with the new Cluster / chosen columns appended.
X
Out[252]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6 Cluster chosen
0 0.898091 0.151819 -1.172713 0.474387 -0.020230 1.228657 2 0
1 0.618513 -0.762588 0.061946 0.944076 0.697880 0.021150 0 0
2 0.685649 0.002933 0.719805 -1.251700 -0.952424 1.444556 2 0
3 1.175209 -0.552349 0.336427 0.482978 -0.212146 -0.144225 0 0
4 1.350337 -1.407757 0.258917 -0.523670 0.099306 1.706064 2 0
5 0.907564 -1.769301 1.177857 -0.869472 0.392594 0.385760 0 0
6 -0.071420 -0.800769 0.238726 1.318866 -1.075628 -0.545006 0 0
7 0.476433 -1.202140 -1.713665 0.379487 -0.347674 0.777899 2 0
8 0.572039 -1.488738 -0.403914 -1.066061 -0.818836 0.339231 2 0
9 0.741137 0.139987 0.726307 1.670135 -0.317435 -1.091941 0 0
10 0.533655 -0.111619 0.435253 1.832919 -0.556933 -1.014603 0 0
11 -0.667308 0.502566 -1.137726 -0.714521 -0.497571 0.123297 2 0
12 0.161812 0.294263 0.659166 -0.336211 1.410350 -0.272418 1 0
13 -0.373777 -1.439681 0.009190 0.731635 0.138615 0.850511 0 0
14 0.745550 0.214669 0.209787 0.424963 0.448908 -0.204578 0 0
15 0.320726 0.108060 0.208510 -1.138882 -0.874041 -1.779091 1 0
16 0.646392 -0.726119 0.153724 -0.203580 -1.017329 -1.068601 0 0
17 -0.042981 -0.672256 0.358250 -0.385808 -0.341018 -1.823744 0 0
18 0.822192 0.184879 1.658679 1.705929 3.070140 -1.218005 0 0
19 0.175070 0.195153 1.969940 0.005043 0.430538 -1.502715 0 0
20 1.339692 -1.202498 0.487937 -0.769520 -1.973308 -0.400699 0 0
21 1.290923 -0.546138 0.120024 0.429258 -0.165681 0.856938 2 0
22 1.528224 -0.912727 0.962682 -0.386673 -0.772181 -0.291766 0 0
23 -0.486779 -1.124424 0.559106 0.746533 -1.101240 1.082216 0 0
24 -0.230729 0.999926 -0.678209 -0.175670 1.412258 0.572372 1 0
25 -0.632681 0.618852 -0.778803 -0.808112 -0.442115 -0.146177 1 0
26 -1.151505 -1.127449 1.500641 -0.822825 0.158380 0.792656 0 0
27 0.265739 -3.078847 -0.939567 0.268673 -0.642098 -0.984495 0 0
28 0.623357 -1.241561 -1.149654 1.231993 2.023015 -0.070476 0 0
29 0.930863 -1.763587 -1.608926 0.462097 -0.677599 -0.693427 2 0
... ... ... ... ... ... ... ... ...
225 -1.444140 -0.088370 -0.458428 0.530251 -0.475625 -0.057486 1 1
226 -0.297006 0.887935 0.467148 2.000374 -0.396849 -0.846195 0 1
227 -1.624166 0.777486 0.635044 -1.376180 0.998008 -0.910882 1 1
228 0.230618 1.438780 0.301556 -1.353873 -0.586627 -0.102947 1 1
229 -0.163123 1.329205 0.721279 -1.383030 0.540446 -1.181571 1 1
230 -1.337576 0.249897 0.081067 0.886335 -0.078090 -0.344245 1 1
231 0.304553 0.584052 0.915910 2.455180 1.007231 0.268298 0 1
232 -0.291785 0.247731 -0.740382 0.896773 0.457951 0.390640 2 1
233 -0.532056 1.686101 0.358185 -1.561985 0.911246 0.638759 1 1
234 -1.223692 0.723005 0.599197 -0.955626 0.653814 0.112686 1 1
235 1.412552 -0.817418 0.038464 -2.397710 -2.903923 1.454325 2 1
236 0.141392 -0.756740 -1.981390 -0.636588 0.230786 0.968907 2 1
237 1.157567 -0.442417 -1.342532 -0.893118 -0.552517 -0.791388 2 1
238 -1.683225 -0.036571 0.297162 -1.488549 1.387872 -0.306946 1 1
239 -0.997159 0.655257 2.239993 -1.422875 0.373101 0.159004 1 1
240 -1.142741 0.931927 1.440876 0.665641 -0.994237 -1.093039 1 1
241 -0.151675 -0.971306 0.447819 0.895444 -0.863907 0.150120 0 1
242 -0.837654 -1.170592 0.622658 0.448216 -0.830715 -0.222067 0 1
243 -0.059101 -0.857751 0.253657 0.272951 -0.833270 0.160823 0 1
244 1.455210 -1.123798 1.124970 -1.841854 -0.183521 -0.193778 0 1
245 1.459407 -1.071308 -0.261053 -0.731205 0.603463 0.358072 2 1
246 1.850117 -1.364586 1.015519 -1.479941 -1.262489 -0.485304 0 1
247 0.468703 0.776904 -1.200084 -0.109459 0.572206 0.353229 2 1
248 0.758187 -0.030802 -1.190930 -0.092637 0.048267 2.174173 2 1
249 0.465492 -0.042081 0.541343 0.584645 0.066443 -1.886670 0 1
250 -1.114193 1.666162 0.201458 -1.543125 -0.123758 -0.430641 1 1
251 -1.675129 1.101864 0.721966 -1.964153 0.827116 0.134812 1 1
252 -1.371728 0.888874 -0.186673 -0.931346 0.795500 -1.063218 1 1
253 0.221249 0.272024 -1.593712 -0.242394 0.752955 1.102656 2 1
254 -0.747040 1.308435 0.858494 -1.950134 1.779312 -0.711789 1 1

255 rows × 8 columns

In [253]:
# Contingency table of Cluster vs. chosen — pd.crosstab computes this directly
# and fills absent (cluster, chosen) pairs with 0 instead of NaN, which the
# groupby/pivot route would leave as gaps in the bars.
pivot_df = pd.crosstab(X['Cluster'], X['chosen'])
# Stacked bars: per cluster, how many tracks were chosen (1) vs not (0).
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[253]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e8304ba3c8>
In [75]:
# Render the company name as a markdown section header.
# NOTE(review): this import would normally live in the top import cell.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[2]))

Gramma

ANN

In [341]:
# Feature frame for company index 2 (df_n_ps_std_tc presumably holds the
# standardized tonal-centroid frames — confirm upstream).
X = df_n_ps_std_tc[2]
In [342]:
# Binary target: whether each track was chosen (0/1, as seen in the tables above).
y = df_n_ps[2]['chosen']
In [343]:
# Hold out a test set. random_state pins the split so the notebook reproduces
# the same train/test partition on Restart & Run All (was previously unseeded).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234)
In [344]:
# Sanity check: training rows x feature count.
X_train.shape
Out[344]:
(231, 6)
In [260]:
# Base estimator for the grid search; hidden_layer_sizes given here is just a
# placeholder — the grid below overrides it.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [261]:
# Candidate hyper-parameter values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
# One-, two- and three-hidden-layer architectures of 10-30 units each.
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005,
                          0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]  # excluded from the final grid
In [262]:
import time
start = time.time() # Current time in seconds since Jan 1, 1970 (reference point)

# Seed for reproducibility of the stochastic MLP training.
np.random.seed(1234)
# Hyper-parameter grid; batch_size is left out to keep the search tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track both Cohen's kappa and accuracy; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): `iid` was deprecated in scikit-learn 0.22 and removed in 0.24 —
# drop it when upgrading the environment.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [263]:
# Run the exhaustive grid search (the expensive step — ~28 min per the log).
grid.fit(X_train, y_train)

# Report the best hyper-parameters with their CV accuracy and kappa.
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time right after the model finished training
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'logistic', 'hidden_layer_sizes': (10,), 'learning_rate_init': 0.01, 'max_iter': 10}, que permiten obtener un Accuracy de 81.82% y un Kappa del 6.27
Tiempo total: 28.04 minutos
In [345]:
# Number of input features for the Keras re-implementation of the best MLP.
n0 = X_train.shape[1]
# Mirror the architecture found by the grid search (best: one hidden layer of 10).
grid.best_params_['hidden_layer_sizes'] = [10]
### hidden_layer_sizes
# Layer widths: the hidden layers followed by a single output unit —
# list() + concatenation replaces the original element-by-element copy loop.
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]
lr = 0.01    # best learning_rate_init from the grid search
epochs = 10  # best max_iter from the grid search
In [346]:
# Keras input placeholder with n0 features per sample.
input_tensor = Input(shape = (n0,))
In [347]:
# Stack the hidden layers; hidden_outputs[-1] is always the most recent output.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))

# Single sigmoid unit for binary classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
In [348]:
# Build the functional model and save its freshly initialized weights so the
# training cell below can restart from identical initial conditions.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [349]:
# Print the layer-by-layer architecture and parameter counts.
model.summary()
Model: "model_17"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_17 (InputLayer)        (None, 6)                 0         
_________________________________________________________________
dense_55 (Dense)             (None, 10)                70        
_________________________________________________________________
dense_56 (Dense)             (None, 1)                 11        
=================================================================
Total params: 81
Trainable params: 81
Non-trainable params: 0
_________________________________________________________________
In [350]:
# Reset to the saved initial weights so every run starts from the same point.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy Keras argument name; newer versions use
# `learning_rate` — update when the environment is upgraded.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy plateaus for 10 epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 231 samples, validate on 78 samples
Epoch 1/10
231/231 [==============================] - 0s 632us/step - loss: 0.7179 - accuracy: 0.5281 - val_loss: 0.6663 - val_accuracy: 0.6026
Epoch 2/10
231/231 [==============================] - 0s 52us/step - loss: 0.6066 - accuracy: 0.7013 - val_loss: 0.6283 - val_accuracy: 0.6667
Epoch 3/10
231/231 [==============================] - 0s 56us/step - loss: 0.5464 - accuracy: 0.7576 - val_loss: 0.6155 - val_accuracy: 0.6538
Epoch 4/10
231/231 [==============================] - 0s 56us/step - loss: 0.5144 - accuracy: 0.7706 - val_loss: 0.5962 - val_accuracy: 0.7308
Epoch 5/10
231/231 [==============================] - 0s 56us/step - loss: 0.4943 - accuracy: 0.8009 - val_loss: 0.5916 - val_accuracy: 0.7436
Epoch 6/10
231/231 [==============================] - 0s 56us/step - loss: 0.4849 - accuracy: 0.8095 - val_loss: 0.5781 - val_accuracy: 0.7436
Epoch 7/10
231/231 [==============================] - 0s 56us/step - loss: 0.4773 - accuracy: 0.8052 - val_loss: 0.5673 - val_accuracy: 0.7436
Epoch 8/10
231/231 [==============================] - 0s 56us/step - loss: 0.4764 - accuracy: 0.8052 - val_loss: 0.5616 - val_accuracy: 0.7692
Epoch 9/10
231/231 [==============================] - 0s 52us/step - loss: 0.4757 - accuracy: 0.8052 - val_loss: 0.5632 - val_accuracy: 0.7692
Epoch 10/10
231/231 [==============================] - 0s 56us/step - loss: 0.4710 - accuracy: 0.8052 - val_loss: 0.5628 - val_accuracy: 0.7692
In [351]:
# Training curves from the Keras History object.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# Dedicated name: the original reused `epochs`, clobbering the integer epoch
# count defined in the config cell — re-running the training cell after this
# one would then pass a range object to model.fit.
epoch_range = range(len(acc))

print(epoch_range)  # quick sanity check of the x-axis extent

plt.plot(epoch_range, acc, 'bo', label='Training acc')
plt.plot(epoch_range, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.legend()
plt.show()

plt.plot(epoch_range, loss, 'bo', label='Training loss')
plt.plot(epoch_range, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
range(0, 10)
In [352]:
# Final evaluation on the held-out test set.
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
78/78 [==============================] - 0s 51us/step
test loss: 0.5627795901053991, test accuracy: 0.7692307829856873
In [353]:
# model.predict returns the sigmoid output (score in [0, 1]); ROC AUC is
# computed on these raw scores, not on hard labels.
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
AUC ROC:  0.5194444444444444
In [354]:
# Threshold the predicted scores at 0.5 to obtain hard class labels.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.05645161290322598

KMeans

In [274]:
# Inspect the feature frame used for clustering.
# NOTE(review): execution counts in this section are out of order (In[274]
# after In[35x]) — confirm which assignment produced this X on a fresh run.
X
Out[274]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6
0 0.609365 0.535135 0.096066 1.915075 0.555249 1.854023
1 -0.157519 -1.311578 -0.486252 0.435334 -0.648735 -1.184658
2 -1.148976 -1.325889 0.573178 -1.556913 0.331644 1.236576
3 -0.996141 -0.557658 0.998693 -0.200592 -0.357882 0.086757
4 -0.648977 -0.248473 0.461357 -1.268368 -1.468590 -0.111563
5 0.642800 -1.165140 -1.792767 -0.560937 -0.122789 1.152255
6 0.443136 -0.482419 -1.016712 0.352897 -0.127270 0.658601
7 -0.022060 -0.811084 -0.405764 0.350011 0.381710 -0.262666
8 0.961630 1.407288 1.551164 0.606159 -0.391772 0.029812
9 0.798279 1.109447 -0.137057 0.704421 -0.893816 1.290122
10 0.685212 1.076167 -0.288224 1.508940 0.286089 2.439563
11 0.803628 1.560996 0.774426 -0.123135 -0.047389 -0.441250
12 0.099890 1.445746 -1.453815 -0.214603 -1.139872 -0.252335
13 -0.212854 1.286504 0.720036 -0.872519 0.955706 -2.078957
14 -1.537501 -0.213678 -1.834954 -1.070992 1.539047 0.982703
15 -1.440013 0.872943 -0.809520 -0.147752 0.222384 -1.045942
16 -1.697309 0.606859 -1.747229 0.351080 1.440681 -1.505620
17 1.508685 0.904715 2.767009 0.452322 0.416081 -1.050028
18 0.675946 0.502807 1.115627 -1.151104 0.856946 -1.406132
19 1.299159 0.428687 1.764048 2.076242 0.575092 0.580747
20 0.769693 -0.740825 0.452717 -0.142755 0.170817 -0.358021
21 0.495856 -0.499875 -0.034482 0.350727 0.157251 0.626608
22 0.772290 -0.678641 0.547946 -0.428025 -1.433904 -0.355505
23 0.137654 1.496537 0.060579 -0.502941 0.411442 -0.122067
24 0.347129 0.809472 0.948516 -0.022364 -0.154971 -0.309935
25 0.029111 1.842864 0.123391 0.049825 0.617290 -0.085588
26 -1.473729 0.531152 0.801294 -1.410029 1.941482 0.485707
27 -0.484210 0.210847 1.009613 1.253693 -0.492018 -0.245022
28 -0.013618 -2.024383 -0.832429 0.712753 0.584767 0.136182
29 -1.746529 0.488857 -0.774537 0.083168 1.702499 -0.373082
... ... ... ... ... ... ...
279 -0.218279 -0.903841 -1.454712 1.847673 -0.519253 -0.262567
280 0.264235 -1.579208 -1.404331 -0.267295 1.169932 0.986186
281 -0.895563 0.461466 0.497480 0.976976 -0.096074 0.123883
282 -1.010758 0.423694 0.383226 0.839004 -0.068742 -0.327768
283 -1.300227 0.920815 0.844807 0.541693 0.080564 -0.199530
284 1.450169 0.335733 1.585783 0.110995 1.150821 -0.583010
285 1.913390 0.883060 1.899591 1.145796 0.539016 0.768216
286 1.444559 0.481538 1.742822 0.581765 1.145564 -0.912228
287 1.470621 -0.218928 -0.057911 -0.985989 -1.598797 -0.641710
288 1.040063 -0.416007 0.629628 -0.616364 -1.267930 -0.730043
289 1.328762 -0.491263 0.742595 -0.863279 -1.402041 -0.885662
290 0.397008 1.213991 -0.429044 0.832112 -0.843278 1.390832
291 0.172503 0.897500 -0.272973 0.210231 -0.458942 0.381469
292 0.661186 1.163101 -0.294020 -0.024527 -0.744258 0.845600
293 -0.773212 -0.895468 0.318110 1.184648 -0.212121 -0.353155
294 -0.370777 -1.414554 -1.094557 0.703279 1.214849 0.177043
295 0.138250 -1.936963 0.151220 0.398994 2.119363 1.534181
296 0.819776 -0.472427 -0.322556 0.733590 0.794066 0.850059
297 0.754438 -1.235411 0.322527 -0.225499 -1.850620 0.965915
298 1.007225 -0.846471 -0.431575 -0.067897 0.376757 0.841471
299 0.218592 -0.968903 -0.899045 -0.447626 -0.181640 -0.029332
300 0.664524 -0.896436 -1.249763 0.108770 0.071596 -0.691963
301 0.711838 -1.301487 -0.735548 -0.243133 0.655501 0.656529
302 -0.800747 0.870240 0.930120 1.819532 0.060854 0.468146
303 -1.334960 1.396005 2.690760 1.279657 0.956382 1.160282
304 -0.792172 0.906439 1.374725 1.647470 -0.613395 0.102004
305 -1.381202 0.689863 -0.347752 0.607044 -0.309056 -0.227433
306 -1.296109 0.634783 -0.484683 0.814045 -0.809678 0.515808
307 0.326809 -0.987801 -1.740993 0.307094 2.260097 0.394211
308 0.873341 2.041793 -1.371451 -2.438935 -0.567199 -1.300657

309 rows × 6 columns

In [275]:
# Elbow method for this company: inertia for K = 1..14 in one comprehension
# (KMeans.fit returns the fitted estimator).
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[275]:
[1854.0,
 1536.8736642821204,
 1324.7285075839484,
 1167.9156349888185,
 1059.6165149053068,
 950.057768838126,
 894.4360136638212,
 823.7421512103674,
 771.932101822877,
 716.3847905620515,
 681.7237006830833,
 653.1513609666642,
 638.9262100013641,
 609.9795957680683]
In [276]:
# Elbow curve: label the axes and title so the figure stands alone.
plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow curve')
plt.xlabel('Number of clusters K')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.show()
Out[276]:
[<matplotlib.lines.Line2D at 0x1e831b5c9e8>]

From the elbow curve above we pick K = 4.

In [277]:
# Final K-means model with K = 4 (chosen from the elbow curve above);
# n_init=10 restarts keep the solution stable for the fixed random_state.
kmeans_tc = KMeans(n_clusters=4, n_init=10, random_state=0)
kmeans_tc.fit(X)
Out[277]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=4, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [278]:
# Cluster id assigned to each row of X during fit.
kmeans_tc.labels_
Out[278]:
array([2, 1, 3, 1, 3, 2, 2, 2, 1, 2, 2, 3, 1, 0, 0, 0, 0, 3, 0, 1, 3, 2,
       3, 0, 1, 0, 0, 1, 2, 0, 2, 2, 2, 1, 0, 2, 2, 3, 1, 1, 1, 2, 2, 2,
       2, 2, 1, 3, 3, 3, 1, 1, 1, 0, 0, 0, 2, 2, 3, 2, 0, 2, 0, 2, 2, 2,
       3, 1, 1, 1, 1, 3, 3, 3, 0, 0, 2, 1, 1, 1, 2, 2, 2, 0, 1, 0, 3, 3,
       3, 2, 0, 1, 0, 2, 1, 1, 1, 1, 2, 2, 2, 3, 0, 1, 1, 1, 3, 2, 0, 2,
       0, 3, 3, 3, 2, 0, 0, 1, 0, 0, 3, 3, 0, 1, 1, 0, 3, 3, 2, 0, 2, 0,
       0, 0, 3, 2, 1, 0, 0, 0, 3, 3, 1, 2, 3, 3, 0, 1, 0, 0, 3, 0, 1, 3,
       2, 0, 3, 1, 3, 1, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 3, 1, 1, 1, 3,
       3, 2, 2, 2, 2, 2, 2, 3, 1, 0, 0, 1, 2, 1, 1, 0, 0, 2, 3, 3, 1, 0,
       1, 0, 1, 2, 2, 2, 1, 0, 1, 3, 3, 3, 2, 2, 2, 0, 1, 1, 1, 0, 1, 1,
       1, 2, 2, 3, 1, 3, 3, 0, 3, 3, 3, 3, 1, 1, 3, 3, 3, 3, 0, 0, 0, 1,
       1, 3, 3, 1, 2, 3, 2, 3, 1, 3, 2, 0, 1, 2, 2, 2, 0, 1, 0, 3, 2, 3,
       0, 0, 2, 0, 0, 0, 3, 2, 2, 2, 1, 1, 2, 1, 0, 2, 2, 1, 1, 1, 3, 3,
       3, 3, 3, 3, 2, 1, 2, 1, 2, 2, 2, 3, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2,
       0])
In [279]:
# NOTE(review): predict(X) on the fit data duplicates kmeans_tc.labels_
# (identical Out above) — redundant but harmless.
clusters_tc = kmeans_tc.predict(X)
clusters_tc
Out[279]:
array([2, 1, 3, 1, 3, 2, 2, 2, 1, 2, 2, 3, 1, 0, 0, 0, 0, 3, 0, 1, 3, 2,
       3, 0, 1, 0, 0, 1, 2, 0, 2, 2, 2, 1, 0, 2, 2, 3, 1, 1, 1, 2, 2, 2,
       2, 2, 1, 3, 3, 3, 1, 1, 1, 0, 0, 0, 2, 2, 3, 2, 0, 2, 0, 2, 2, 2,
       3, 1, 1, 1, 1, 3, 3, 3, 0, 0, 2, 1, 1, 1, 2, 2, 2, 0, 1, 0, 3, 3,
       3, 2, 0, 1, 0, 2, 1, 1, 1, 1, 2, 2, 2, 3, 0, 1, 1, 1, 3, 2, 0, 2,
       0, 3, 3, 3, 2, 0, 0, 1, 0, 0, 3, 3, 0, 1, 1, 0, 3, 3, 2, 0, 2, 0,
       0, 0, 3, 2, 1, 0, 0, 0, 3, 3, 1, 2, 3, 3, 0, 1, 0, 0, 3, 0, 1, 3,
       2, 0, 3, 1, 3, 1, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 3, 1, 1, 1, 3,
       3, 2, 2, 2, 2, 2, 2, 3, 1, 0, 0, 1, 2, 1, 1, 0, 0, 2, 3, 3, 1, 0,
       1, 0, 1, 2, 2, 2, 1, 0, 1, 3, 3, 3, 2, 2, 2, 0, 1, 1, 1, 0, 1, 1,
       1, 2, 2, 3, 1, 3, 3, 0, 3, 3, 3, 3, 1, 1, 3, 3, 3, 3, 0, 0, 0, 1,
       1, 3, 3, 1, 2, 3, 2, 3, 1, 3, 2, 0, 1, 2, 2, 2, 0, 1, 0, 3, 2, 3,
       0, 0, 2, 0, 0, 0, 3, 2, 2, 2, 1, 1, 2, 1, 0, 2, 2, 1, 1, 1, 3, 3,
       3, 3, 3, 3, 2, 1, 2, 1, 2, 2, 2, 3, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2,
       0])
In [280]:
# Attach the cluster id and the target back onto X for the comparison below.
# .assign returns a new frame, avoiding the SettingWithCopyWarning that
# .loc[:, col] assignment can raise when X is derived from another frame.
X = X.assign(Cluster=clusters_tc, chosen=list(y))
In [281]:
# Display X with the new Cluster / chosen columns appended.
X
Out[281]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6 Cluster chosen
0 0.609365 0.535135 0.096066 1.915075 0.555249 1.854023 2 0
1 -0.157519 -1.311578 -0.486252 0.435334 -0.648735 -1.184658 1 0
2 -1.148976 -1.325889 0.573178 -1.556913 0.331644 1.236576 3 0
3 -0.996141 -0.557658 0.998693 -0.200592 -0.357882 0.086757 1 0
4 -0.648977 -0.248473 0.461357 -1.268368 -1.468590 -0.111563 3 0
5 0.642800 -1.165140 -1.792767 -0.560937 -0.122789 1.152255 2 0
6 0.443136 -0.482419 -1.016712 0.352897 -0.127270 0.658601 2 0
7 -0.022060 -0.811084 -0.405764 0.350011 0.381710 -0.262666 2 0
8 0.961630 1.407288 1.551164 0.606159 -0.391772 0.029812 1 0
9 0.798279 1.109447 -0.137057 0.704421 -0.893816 1.290122 2 0
10 0.685212 1.076167 -0.288224 1.508940 0.286089 2.439563 2 0
11 0.803628 1.560996 0.774426 -0.123135 -0.047389 -0.441250 3 0
12 0.099890 1.445746 -1.453815 -0.214603 -1.139872 -0.252335 1 0
13 -0.212854 1.286504 0.720036 -0.872519 0.955706 -2.078957 0 0
14 -1.537501 -0.213678 -1.834954 -1.070992 1.539047 0.982703 0 0
15 -1.440013 0.872943 -0.809520 -0.147752 0.222384 -1.045942 0 0
16 -1.697309 0.606859 -1.747229 0.351080 1.440681 -1.505620 0 0
17 1.508685 0.904715 2.767009 0.452322 0.416081 -1.050028 3 0
18 0.675946 0.502807 1.115627 -1.151104 0.856946 -1.406132 0 0
19 1.299159 0.428687 1.764048 2.076242 0.575092 0.580747 1 0
20 0.769693 -0.740825 0.452717 -0.142755 0.170817 -0.358021 3 0
21 0.495856 -0.499875 -0.034482 0.350727 0.157251 0.626608 2 0
22 0.772290 -0.678641 0.547946 -0.428025 -1.433904 -0.355505 3 0
23 0.137654 1.496537 0.060579 -0.502941 0.411442 -0.122067 0 0
24 0.347129 0.809472 0.948516 -0.022364 -0.154971 -0.309935 1 0
25 0.029111 1.842864 0.123391 0.049825 0.617290 -0.085588 0 0
26 -1.473729 0.531152 0.801294 -1.410029 1.941482 0.485707 0 0
27 -0.484210 0.210847 1.009613 1.253693 -0.492018 -0.245022 1 0
28 -0.013618 -2.024383 -0.832429 0.712753 0.584767 0.136182 2 0
29 -1.746529 0.488857 -0.774537 0.083168 1.702499 -0.373082 0 0
... ... ... ... ... ... ... ... ...
279 -0.218279 -0.903841 -1.454712 1.847673 -0.519253 -0.262567 2 1
280 0.264235 -1.579208 -1.404331 -0.267295 1.169932 0.986186 2 1
281 -0.895563 0.461466 0.497480 0.976976 -0.096074 0.123883 1 1
282 -1.010758 0.423694 0.383226 0.839004 -0.068742 -0.327768 1 1
283 -1.300227 0.920815 0.844807 0.541693 0.080564 -0.199530 1 1
284 1.450169 0.335733 1.585783 0.110995 1.150821 -0.583010 3 1
285 1.913390 0.883060 1.899591 1.145796 0.539016 0.768216 3 1
286 1.444559 0.481538 1.742822 0.581765 1.145564 -0.912228 3 1
287 1.470621 -0.218928 -0.057911 -0.985989 -1.598797 -0.641710 3 1
288 1.040063 -0.416007 0.629628 -0.616364 -1.267930 -0.730043 3 1
289 1.328762 -0.491263 0.742595 -0.863279 -1.402041 -0.885662 3 1
290 0.397008 1.213991 -0.429044 0.832112 -0.843278 1.390832 2 1
291 0.172503 0.897500 -0.272973 0.210231 -0.458942 0.381469 1 1
292 0.661186 1.163101 -0.294020 -0.024527 -0.744258 0.845600 2 1
293 -0.773212 -0.895468 0.318110 1.184648 -0.212121 -0.353155 1 1
294 -0.370777 -1.414554 -1.094557 0.703279 1.214849 0.177043 2 1
295 0.138250 -1.936963 0.151220 0.398994 2.119363 1.534181 2 1
296 0.819776 -0.472427 -0.322556 0.733590 0.794066 0.850059 2 1
297 0.754438 -1.235411 0.322527 -0.225499 -1.850620 0.965915 3 1
298 1.007225 -0.846471 -0.431575 -0.067897 0.376757 0.841471 2 1
299 0.218592 -0.968903 -0.899045 -0.447626 -0.181640 -0.029332 2 1
300 0.664524 -0.896436 -1.249763 0.108770 0.071596 -0.691963 2 1
301 0.711838 -1.301487 -0.735548 -0.243133 0.655501 0.656529 2 1
302 -0.800747 0.870240 0.930120 1.819532 0.060854 0.468146 1 1
303 -1.334960 1.396005 2.690760 1.279657 0.956382 1.160282 1 1
304 -0.792172 0.906439 1.374725 1.647470 -0.613395 0.102004 1 1
305 -1.381202 0.689863 -0.347752 0.607044 -0.309056 -0.227433 1 1
306 -1.296109 0.634783 -0.484683 0.814045 -0.809678 0.515808 1 1
307 0.326809 -0.987801 -1.740993 0.307094 2.260097 0.394211 2 1
308 0.873341 2.041793 -1.371451 -2.438935 -0.567199 -1.300657 0 1

309 rows × 8 columns

In [282]:
# Contingency table of Cluster vs. chosen — pd.crosstab computes this directly
# and fills absent (cluster, chosen) pairs with 0 instead of NaN, which the
# groupby/pivot route would leave as gaps in the bars.
pivot_df = pd.crosstab(X['Cluster'], X['chosen'])
# Stacked bars: per cluster, how many tracks were chosen (1) vs not (0).
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[282]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e831bdb940>
In [355]:
# Render the company name as a markdown section header.
# NOTE(review): this import would normally live in the top import cell.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[3]))

Hotel Marrakech

ANN

In [356]:
# Feature frame for company index 3 (df_n_ps_std_tc presumably holds the
# standardized tonal-centroid frames — confirm upstream).
X = df_n_ps_std_tc[3]
In [357]:
# Binary target: whether each track was chosen (0/1).
y = df_n_ps[3]['chosen']
In [358]:
# Hold out a test set. random_state pins the split so the notebook reproduces
# the same train/test partition on Restart & Run All (was previously unseeded).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234)
In [359]:
# Sanity check: training rows x feature count.
X_train.shape
Out[359]:
(139, 6)
In [288]:
# Base estimator for the grid search; hidden_layer_sizes given here is just a
# placeholder — the grid below overrides it.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [289]:
# Candidate hyper-parameter values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
# One-, two- and three-hidden-layer architectures of 10-30 units each.
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005,
                          0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]  # excluded from the final grid
In [290]:
import time
start = time.time() # Current time in seconds since Jan 1, 1970 (reference point)

# Seed for reproducibility of the stochastic MLP training.
np.random.seed(1234)
# Hyper-parameter grid; batch_size is left out to keep the search tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track both Cohen's kappa and accuracy; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): `iid` was deprecated in scikit-learn 0.22 and removed in 0.24 —
# drop it when upgrading the environment.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [291]:
# Run the exhaustive grid search (the expensive step — ~22 min per the log).
grid.fit(X_train, y_train)

# Report the best hyper-parameters with their CV accuracy and kappa.
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time right after the model finished training
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30,), 'learning_rate_init': 0.007, 'max_iter': 1000}, que permiten obtener un Accuracy de 80.58% y un Kappa del 50.08
Tiempo total: 22.27 minutos
In [360]:
# Number of input features for the Keras re-implementation of the best MLP.
n0 = X_train.shape[1]
# Mirror the architecture found by the grid search (best: one hidden layer of 30).
grid.best_params_['hidden_layer_sizes'] = [30]
### hidden_layer_sizes
# Layer widths: the hidden layers followed by a single output unit —
# list() + concatenation replaces the original element-by-element copy loop.
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]
lr = 0.007      # best learning_rate_init from the grid search
epochs = 1000   # best max_iter from the grid search
In [361]:
# Keras input placeholder with n0 features per sample.
input_tensor = Input(shape = (n0,))
In [362]:
# Stack the hidden layers; hidden_outputs[-1] is always the most recent output.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))

# Single sigmoid unit for binary classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
In [363]:
# Build the functional model and save its freshly initialized weights so the
# training cell below can restart from identical initial conditions.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [364]:
# Print the layer-by-layer architecture and parameter counts.
model.summary()
Model: "model_18"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_18 (InputLayer)        (None, 6)                 0         
_________________________________________________________________
dense_57 (Dense)             (None, 30)                210       
_________________________________________________________________
dense_58 (Dense)             (None, 1)                 31        
=================================================================
Total params: 241
Trainable params: 241
Non-trainable params: 0
_________________________________________________________________
In [365]:
# Reset to the saved initial weights so every run starts from the same point.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy Keras argument name; newer versions use
# `learning_rate` — update when the environment is upgraded.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy plateaus for 10 epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 139 samples, validate on 47 samples
Epoch 1/1000
139/139 [==============================] - 0s 1ms/step - loss: 0.6991 - accuracy: 0.5396 - val_loss: 0.6750 - val_accuracy: 0.6596
Epoch 2/1000
139/139 [==============================] - 0s 72us/step - loss: 0.6502 - accuracy: 0.6331 - val_loss: 0.6494 - val_accuracy: 0.6596
Epoch 3/1000
139/139 [==============================] - 0s 72us/step - loss: 0.6211 - accuracy: 0.6691 - val_loss: 0.6343 - val_accuracy: 0.6809
Epoch 4/1000
139/139 [==============================] - 0s 79us/step - loss: 0.6115 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6809
Epoch 5/1000
139/139 [==============================] - 0s 65us/step - loss: 0.6039 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 6/1000
139/139 [==============================] - 0s 72us/step - loss: 0.6032 - accuracy: 0.6835 - val_loss: 0.6279 - val_accuracy: 0.6383
Epoch 7/1000
139/139 [==============================] - 0s 72us/step - loss: 0.6017 - accuracy: 0.6978 - val_loss: 0.6370 - val_accuracy: 0.6383
Epoch 8/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5974 - accuracy: 0.6978 - val_loss: 0.6361 - val_accuracy: 0.6383
Epoch 9/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5954 - accuracy: 0.6978 - val_loss: 0.6361 - val_accuracy: 0.6383
Epoch 10/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5932 - accuracy: 0.6978 - val_loss: 0.6377 - val_accuracy: 0.6383
Epoch 11/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5925 - accuracy: 0.6906 - val_loss: 0.6333 - val_accuracy: 0.6383
Epoch 12/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5886 - accuracy: 0.6906 - val_loss: 0.6396 - val_accuracy: 0.6383
Epoch 13/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5881 - accuracy: 0.6835 - val_loss: 0.6433 - val_accuracy: 0.6383

Epoch 00013: ReduceLROnPlateau reducing learning rate to 0.0035000001080334187.
Epoch 14/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5854 - accuracy: 0.6835 - val_loss: 0.6396 - val_accuracy: 0.6383
Epoch 15/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5840 - accuracy: 0.6835 - val_loss: 0.6392 - val_accuracy: 0.6383
Epoch 16/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5829 - accuracy: 0.6835 - val_loss: 0.6382 - val_accuracy: 0.6383
Epoch 17/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5809 - accuracy: 0.6835 - val_loss: 0.6360 - val_accuracy: 0.6383
Epoch 18/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5801 - accuracy: 0.6906 - val_loss: 0.6355 - val_accuracy: 0.6383
Epoch 19/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5783 - accuracy: 0.6906 - val_loss: 0.6337 - val_accuracy: 0.6383
Epoch 20/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5777 - accuracy: 0.6835 - val_loss: 0.6298 - val_accuracy: 0.6383
Epoch 21/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5765 - accuracy: 0.6906 - val_loss: 0.6288 - val_accuracy: 0.6383
Epoch 22/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5753 - accuracy: 0.6978 - val_loss: 0.6276 - val_accuracy: 0.6383
Epoch 23/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5739 - accuracy: 0.6978 - val_loss: 0.6261 - val_accuracy: 0.6383

Epoch 00023: ReduceLROnPlateau reducing learning rate to 0.0017500000540167093.
Epoch 24/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5725 - accuracy: 0.6978 - val_loss: 0.6265 - val_accuracy: 0.6383
Epoch 25/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5722 - accuracy: 0.6978 - val_loss: 0.6269 - val_accuracy: 0.6383
Epoch 26/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5717 - accuracy: 0.6978 - val_loss: 0.6275 - val_accuracy: 0.6383
Epoch 27/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5710 - accuracy: 0.6906 - val_loss: 0.6281 - val_accuracy: 0.6383
Epoch 28/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5711 - accuracy: 0.6835 - val_loss: 0.6284 - val_accuracy: 0.6383
Epoch 29/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5698 - accuracy: 0.6835 - val_loss: 0.6282 - val_accuracy: 0.6383
Epoch 30/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5696 - accuracy: 0.6906 - val_loss: 0.6289 - val_accuracy: 0.6383
Epoch 31/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5692 - accuracy: 0.6906 - val_loss: 0.6276 - val_accuracy: 0.6383
Epoch 32/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5681 - accuracy: 0.6906 - val_loss: 0.6276 - val_accuracy: 0.6383
Epoch 33/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5677 - accuracy: 0.6906 - val_loss: 0.6274 - val_accuracy: 0.6383

Epoch 00033: ReduceLROnPlateau reducing learning rate to 0.0008750000270083547.
Epoch 34/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5669 - accuracy: 0.6906 - val_loss: 0.6273 - val_accuracy: 0.6383
Epoch 35/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5664 - accuracy: 0.6906 - val_loss: 0.6274 - val_accuracy: 0.6383
Epoch 36/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5662 - accuracy: 0.6906 - val_loss: 0.6278 - val_accuracy: 0.6383
Epoch 37/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5659 - accuracy: 0.6906 - val_loss: 0.6281 - val_accuracy: 0.6383
Epoch 38/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5655 - accuracy: 0.6906 - val_loss: 0.6285 - val_accuracy: 0.6383
Epoch 39/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5652 - accuracy: 0.6906 - val_loss: 0.6287 - val_accuracy: 0.6383
Epoch 40/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5651 - accuracy: 0.6906 - val_loss: 0.6295 - val_accuracy: 0.6383
Epoch 41/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5645 - accuracy: 0.6906 - val_loss: 0.6295 - val_accuracy: 0.6383
Epoch 42/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5642 - accuracy: 0.6906 - val_loss: 0.6296 - val_accuracy: 0.6383
Epoch 43/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5641 - accuracy: 0.6906 - val_loss: 0.6298 - val_accuracy: 0.6383

Epoch 00043: ReduceLROnPlateau reducing learning rate to 0.00043750001350417733.
Epoch 44/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5635 - accuracy: 0.6906 - val_loss: 0.6296 - val_accuracy: 0.6383
Epoch 45/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5633 - accuracy: 0.6906 - val_loss: 0.6297 - val_accuracy: 0.6383
Epoch 46/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5632 - accuracy: 0.6835 - val_loss: 0.6298 - val_accuracy: 0.6383
Epoch 47/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5630 - accuracy: 0.6835 - val_loss: 0.6292 - val_accuracy: 0.6383
Epoch 48/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5630 - accuracy: 0.6906 - val_loss: 0.6285 - val_accuracy: 0.6383
Epoch 49/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5626 - accuracy: 0.6906 - val_loss: 0.6284 - val_accuracy: 0.6383
Epoch 50/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5625 - accuracy: 0.6906 - val_loss: 0.6280 - val_accuracy: 0.6383
Epoch 51/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5622 - accuracy: 0.6906 - val_loss: 0.6279 - val_accuracy: 0.6383
Epoch 52/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5621 - accuracy: 0.6906 - val_loss: 0.6282 - val_accuracy: 0.6383
Epoch 53/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5620 - accuracy: 0.6906 - val_loss: 0.6281 - val_accuracy: 0.6383

Epoch 00053: ReduceLROnPlateau reducing learning rate to 0.00021875000675208867.
Epoch 54/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5618 - accuracy: 0.6906 - val_loss: 0.6281 - val_accuracy: 0.6383
Epoch 55/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5617 - accuracy: 0.6906 - val_loss: 0.6281 - val_accuracy: 0.6383
Epoch 56/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5616 - accuracy: 0.6906 - val_loss: 0.6279 - val_accuracy: 0.6383
Epoch 57/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5616 - accuracy: 0.6906 - val_loss: 0.6277 - val_accuracy: 0.6383
Epoch 58/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5614 - accuracy: 0.6906 - val_loss: 0.6277 - val_accuracy: 0.6383
Epoch 59/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5613 - accuracy: 0.6906 - val_loss: 0.6278 - val_accuracy: 0.6383
Epoch 60/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5613 - accuracy: 0.6906 - val_loss: 0.6276 - val_accuracy: 0.6383
Epoch 61/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5612 - accuracy: 0.6906 - val_loss: 0.6273 - val_accuracy: 0.6383
Epoch 62/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5611 - accuracy: 0.6906 - val_loss: 0.6273 - val_accuracy: 0.6383
Epoch 63/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5610 - accuracy: 0.6906 - val_loss: 0.6273 - val_accuracy: 0.6383

Epoch 00063: ReduceLROnPlateau reducing learning rate to 0.00010937500337604433.
Epoch 64/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5609 - accuracy: 0.6906 - val_loss: 0.6273 - val_accuracy: 0.6383
Epoch 65/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5609 - accuracy: 0.6906 - val_loss: 0.6274 - val_accuracy: 0.6383
Epoch 66/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5608 - accuracy: 0.6906 - val_loss: 0.6274 - val_accuracy: 0.6383
Epoch 67/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5608 - accuracy: 0.6906 - val_loss: 0.6275 - val_accuracy: 0.6383
Epoch 68/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5608 - accuracy: 0.6906 - val_loss: 0.6275 - val_accuracy: 0.6383
Epoch 69/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5607 - accuracy: 0.6906 - val_loss: 0.6275 - val_accuracy: 0.6383
Epoch 70/1000
139/139 [==============================] - ETA: 0s - loss: 0.5842 - accuracy: 0.68 - 0s 86us/step - loss: 0.5607 - accuracy: 0.6906 - val_loss: 0.6275 - val_accuracy: 0.6383
Epoch 71/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5606 - accuracy: 0.6906 - val_loss: 0.6274 - val_accuracy: 0.6383
Epoch 72/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5606 - accuracy: 0.6906 - val_loss: 0.6273 - val_accuracy: 0.6383
Epoch 73/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5605 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00073: ReduceLROnPlateau reducing learning rate to 5.4687501688022166e-05.
Epoch 74/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5605 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 75/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5605 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 76/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5604 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 77/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5604 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 78/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5604 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 79/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5604 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 80/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5603 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 81/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5603 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 82/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5603 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 83/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5603 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00083: ReduceLROnPlateau reducing learning rate to 2.7343750844011083e-05.
Epoch 84/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5603 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 85/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 86/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 87/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 88/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 89/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 90/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 91/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 92/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 93/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00093: ReduceLROnPlateau reducing learning rate to 1.3671875422005542e-05.
Epoch 94/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5602 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 95/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 96/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 97/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 98/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 99/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 100/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 101/1000
139/139 [==============================] - ETA: 0s - loss: 0.5625 - accuracy: 0.68 - 0s 93us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 102/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 103/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00103: ReduceLROnPlateau reducing learning rate to 6.835937711002771e-06.
Epoch 104/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 105/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 106/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 107/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 108/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 109/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 110/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 111/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 112/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 113/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00113: ReduceLROnPlateau reducing learning rate to 3.4179688555013854e-06.
Epoch 114/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 115/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 116/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 117/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 118/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 119/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 120/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 121/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 122/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 123/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00123: ReduceLROnPlateau reducing learning rate to 1.7089844277506927e-06.
Epoch 124/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 125/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 126/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 127/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 128/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 129/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 130/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5601 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 131/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 132/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 133/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00133: ReduceLROnPlateau reducing learning rate to 8.544922138753464e-07.
Epoch 134/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 135/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 136/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 137/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 138/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 139/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 140/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 141/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 142/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 143/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00143: ReduceLROnPlateau reducing learning rate to 4.272461069376732e-07.
Epoch 144/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 145/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 146/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 147/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 148/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 149/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 150/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 151/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 152/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 153/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00153: ReduceLROnPlateau reducing learning rate to 2.136230534688366e-07.
Epoch 154/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 155/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 156/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 157/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 158/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 159/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 160/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 161/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 162/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 163/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00163: ReduceLROnPlateau reducing learning rate to 1.068115267344183e-07.
Epoch 164/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 165/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 166/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 167/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 168/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 169/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 170/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 171/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 172/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 173/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00173: ReduceLROnPlateau reducing learning rate to 5.340576336720915e-08.
Epoch 174/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 175/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 176/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 177/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 178/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 179/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 180/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 181/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 182/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 183/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00183: ReduceLROnPlateau reducing learning rate to 2.6702881683604573e-08.
Epoch 184/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 185/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 186/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 187/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 188/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 189/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 190/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 191/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 192/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 193/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00193: ReduceLROnPlateau reducing learning rate to 1.3351440841802287e-08.
Epoch 194/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 195/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 196/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 197/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 198/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 199/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 200/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 201/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 202/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 203/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00203: ReduceLROnPlateau reducing learning rate to 6.675720420901143e-09.
Epoch 204/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 205/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 206/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 207/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 208/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 209/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 210/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 211/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 212/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 213/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00213: ReduceLROnPlateau reducing learning rate to 3.3378602104505717e-09.
Epoch 214/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 215/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 216/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 217/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 218/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 219/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 220/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 221/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 222/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 223/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00223: ReduceLROnPlateau reducing learning rate to 1.6689301052252858e-09.
Epoch 224/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 225/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 226/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 227/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 228/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 229/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 230/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 231/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 232/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 233/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00233: ReduceLROnPlateau reducing learning rate to 8.344650526126429e-10.
Epoch 234/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 235/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 236/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 237/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 238/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 239/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 240/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 241/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 242/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 243/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00243: ReduceLROnPlateau reducing learning rate to 4.1723252630632146e-10.
Epoch 244/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 245/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 246/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 247/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 248/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 249/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 250/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 251/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 252/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 253/1000
139/139 [==============================] - 0s 58us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00253: ReduceLROnPlateau reducing learning rate to 2.0861626315316073e-10.
Epoch 254/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 255/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 256/1000
139/139 [==============================] - 0s 72us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 257/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 258/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 259/1000
139/139 [==============================] - 0s 65us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 260/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 261/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 262/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 263/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00263: ReduceLROnPlateau reducing learning rate to 1.0430813157658037e-10.
Epoch 264/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 265/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 266/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 267/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 268/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 269/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 270/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 271/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 272/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 273/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00273: ReduceLROnPlateau reducing learning rate to 5.215406578829018e-11.
Epoch 274/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 275/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 276/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 277/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 278/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 279/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 280/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 281/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 282/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 283/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00283: ReduceLROnPlateau reducing learning rate to 2.607703289414509e-11.
Epoch 284/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 285/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 286/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 287/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 288/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 289/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 290/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 291/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 292/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 293/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00293: ReduceLROnPlateau reducing learning rate to 1.3038516447072546e-11.
Epoch 294/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 295/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 296/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 297/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 298/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 299/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 300/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 301/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 302/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 303/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00303: ReduceLROnPlateau reducing learning rate to 6.519258223536273e-12.
Epoch 304/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 305/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 306/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 307/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 308/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 309/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 310/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 311/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 312/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 313/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00313: ReduceLROnPlateau reducing learning rate to 3.2596291117681364e-12.
Epoch 314/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 315/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 316/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 317/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 318/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 319/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 320/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 321/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 322/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 323/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00323: ReduceLROnPlateau reducing learning rate to 1.6298145558840682e-12.
Epoch 324/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 325/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 326/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 327/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 328/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 329/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 330/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 331/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 332/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 333/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00333: ReduceLROnPlateau reducing learning rate to 8.149072779420341e-13.
Epoch 334/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 335/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 336/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 337/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 338/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 339/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 340/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 341/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 342/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 343/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00343: ReduceLROnPlateau reducing learning rate to 4.0745363897101705e-13.
Epoch 344/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 345/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 346/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 347/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 348/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 349/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 350/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 351/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 352/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 353/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00353: ReduceLROnPlateau reducing learning rate to 2.0372681948550853e-13.
Epoch 354/1000
139/139 [==============================] - 0s 158us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 355/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 356/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 357/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 358/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 359/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 360/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 361/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 362/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 363/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00363: ReduceLROnPlateau reducing learning rate to 1.0186340974275426e-13.
Epoch 364/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 365/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 366/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 367/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 368/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 369/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 370/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 371/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 372/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 373/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00373: ReduceLROnPlateau reducing learning rate to 5.093170487137713e-14.
Epoch 374/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 375/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 376/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 377/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 378/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 379/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 380/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 381/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 382/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 383/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00383: ReduceLROnPlateau reducing learning rate to 2.5465852435688566e-14.
Epoch 384/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 385/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 386/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 387/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 388/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 389/1000
139/139 [==============================] - ETA: 0s - loss: 0.6763 - accuracy: 0.56 - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 390/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 391/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 392/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 393/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00393: ReduceLROnPlateau reducing learning rate to 1.2732926217844283e-14.
Epoch 394/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 395/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 396/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 397/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 398/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 399/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 400/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 401/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 402/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 403/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00403: ReduceLROnPlateau reducing learning rate to 6.3664631089221414e-15.
Epoch 404/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 405/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 406/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 407/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 408/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 409/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 410/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 411/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 412/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 413/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00413: ReduceLROnPlateau reducing learning rate to 3.1832315544610707e-15.
Epoch 414/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 415/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 416/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 417/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 418/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 419/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 420/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 421/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 422/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 423/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00423: ReduceLROnPlateau reducing learning rate to 1.5916157772305354e-15.
Epoch 424/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 425/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 426/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 427/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 428/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 429/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 430/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 431/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 432/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 433/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00433: ReduceLROnPlateau reducing learning rate to 7.958078886152677e-16.
Epoch 434/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 435/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 436/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 437/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 438/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 439/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 440/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 441/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 442/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 443/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00443: ReduceLROnPlateau reducing learning rate to 3.9790394430763384e-16.
Epoch 444/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 445/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 446/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 447/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 448/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 449/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 450/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 451/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 452/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 453/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00453: ReduceLROnPlateau reducing learning rate to 1.9895197215381692e-16.
Epoch 454/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 455/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 456/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 457/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 458/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 459/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 460/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 461/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 462/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 463/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00463: ReduceLROnPlateau reducing learning rate to 9.947598607690846e-17.
Epoch 464/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 465/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 466/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 467/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 468/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 469/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 470/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 471/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 472/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 473/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00473: ReduceLROnPlateau reducing learning rate to 4.973799303845423e-17.
Epoch 474/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 475/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 476/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 477/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 478/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 479/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 480/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 481/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 482/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 483/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00483: ReduceLROnPlateau reducing learning rate to 2.4868996519227115e-17.
Epoch 484/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 485/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 486/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 487/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 488/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 489/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 490/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 491/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 492/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 493/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00493: ReduceLROnPlateau reducing learning rate to 1.2434498259613557e-17.
Epoch 494/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 495/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 496/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 497/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 498/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 499/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 500/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 501/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 502/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 503/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00503: ReduceLROnPlateau reducing learning rate to 6.217249129806779e-18.
Epoch 504/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 505/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 506/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 507/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 508/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 509/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 510/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 511/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 512/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 513/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00513: ReduceLROnPlateau reducing learning rate to 3.1086245649033894e-18.
Epoch 514/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 515/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 516/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 517/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 518/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 519/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 520/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 521/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 522/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 523/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00523: ReduceLROnPlateau reducing learning rate to 1.5543122824516947e-18.
Epoch 524/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 525/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 526/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 527/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 528/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 529/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 530/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 531/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 532/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 533/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00533: ReduceLROnPlateau reducing learning rate to 7.771561412258473e-19.
Epoch 534/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 535/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 536/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 537/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 538/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 539/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 540/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 541/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 542/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 543/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00543: ReduceLROnPlateau reducing learning rate to 3.8857807061292367e-19.
Epoch 544/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 545/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 546/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 547/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 548/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 549/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 550/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 551/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 552/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 553/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00553: ReduceLROnPlateau reducing learning rate to 1.9428903530646184e-19.
Epoch 554/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 555/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 556/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 557/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 558/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 559/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 560/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 561/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 562/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 563/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00563: ReduceLROnPlateau reducing learning rate to 9.714451765323092e-20.
Epoch 564/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 565/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 566/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 567/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 568/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 569/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 570/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 571/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 572/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 573/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00573: ReduceLROnPlateau reducing learning rate to 4.857225882661546e-20.
Epoch 574/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 575/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 576/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 577/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 578/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 579/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 580/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 581/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 582/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 583/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00583: ReduceLROnPlateau reducing learning rate to 2.428612941330773e-20.
Epoch 584/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 585/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 586/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 587/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 588/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 589/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 590/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 591/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 592/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 593/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00593: ReduceLROnPlateau reducing learning rate to 1.2143064706653865e-20.
Epoch 594/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 595/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 596/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 597/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 598/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 599/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 600/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 601/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 602/1000
139/139 [==============================] - 0s 158us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 603/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00603: ReduceLROnPlateau reducing learning rate to 6.071532353326932e-21.
Epoch 604/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 605/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 606/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 607/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 608/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 609/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 610/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 611/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 612/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 613/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00613: ReduceLROnPlateau reducing learning rate to 3.035766176663466e-21.
Epoch 614/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 615/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 616/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 617/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 618/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 619/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 620/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 621/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 622/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 623/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00623: ReduceLROnPlateau reducing learning rate to 1.517883088331733e-21.
Epoch 624/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 625/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 626/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 627/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 628/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 629/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 630/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 631/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 632/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 633/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00633: ReduceLROnPlateau reducing learning rate to 7.589415441658665e-22.
Epoch 634/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 635/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 636/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 637/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 638/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 639/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 640/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 641/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 642/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 643/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00643: ReduceLROnPlateau reducing learning rate to 3.7947077208293327e-22.
Epoch 644/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 645/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 646/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 647/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 648/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 649/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 650/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 651/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 652/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 653/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00653: ReduceLROnPlateau reducing learning rate to 1.8973538604146664e-22.
Epoch 654/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 655/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 656/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 657/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 658/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 659/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 660/1000
139/139 [==============================] - 0s 151us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 661/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 662/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 663/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00663: ReduceLROnPlateau reducing learning rate to 9.486769302073332e-23.
Epoch 664/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 665/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 666/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 667/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 668/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 669/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 670/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 671/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 672/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 673/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00673: ReduceLROnPlateau reducing learning rate to 4.743384651036666e-23.
Epoch 674/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 675/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 676/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 677/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 678/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 679/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 680/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 681/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 682/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 683/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00683: ReduceLROnPlateau reducing learning rate to 2.371692325518333e-23.
Epoch 684/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 685/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 686/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 687/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 688/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 689/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 690/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 691/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 692/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 693/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00693: ReduceLROnPlateau reducing learning rate to 1.1858461627591665e-23.
Epoch 694/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 695/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 696/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 697/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 698/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 699/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 700/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 701/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 702/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 703/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00703: ReduceLROnPlateau reducing learning rate to 5.9292308137958324e-24.
Epoch 704/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 705/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 706/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 707/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 708/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 709/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 710/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 711/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 712/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 713/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00713: ReduceLROnPlateau reducing learning rate to 2.9646154068979162e-24.
Epoch 714/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 715/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 716/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 717/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 718/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 719/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 720/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 721/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 722/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 723/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00723: ReduceLROnPlateau reducing learning rate to 1.4823077034489581e-24.
Epoch 724/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 725/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 726/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 727/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 728/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 729/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 730/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 731/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 732/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 733/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00733: ReduceLROnPlateau reducing learning rate to 7.4115385172447905e-25.
Epoch 734/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 735/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 736/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 737/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 738/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 739/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 740/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 741/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 742/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 743/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00743: ReduceLROnPlateau reducing learning rate to 3.7057692586223952e-25.
Epoch 744/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 745/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 746/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 747/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 748/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 749/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 750/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 751/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 752/1000
139/139 [==============================] - 0s 165us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 753/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00753: ReduceLROnPlateau reducing learning rate to 1.8528846293111976e-25.
Epoch 754/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 755/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 756/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 757/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 758/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 759/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 760/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 761/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 762/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 763/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00763: ReduceLROnPlateau reducing learning rate to 9.264423146555988e-26.
Epoch 764/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 765/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 766/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 767/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 768/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 769/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 770/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 771/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 772/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 773/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00773: ReduceLROnPlateau reducing learning rate to 4.632211573277994e-26.
Epoch 774/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 775/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 776/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 777/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 778/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 779/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 780/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 781/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 782/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 783/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00783: ReduceLROnPlateau reducing learning rate to 2.316105786638997e-26.
Epoch 784/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 785/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 786/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 787/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 788/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 789/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 790/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 791/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 792/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 793/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00793: ReduceLROnPlateau reducing learning rate to 1.1580528933194985e-26.
Epoch 794/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 795/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 796/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 797/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 798/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 799/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 800/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 801/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 802/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 803/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00803: ReduceLROnPlateau reducing learning rate to 5.7902644665974926e-27.
Epoch 804/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 805/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 806/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 807/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 808/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 809/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 810/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 811/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 812/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 813/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00813: ReduceLROnPlateau reducing learning rate to 2.8951322332987463e-27.
Epoch 814/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 815/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 816/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 817/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 818/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 819/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 820/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 821/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 822/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 823/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00823: ReduceLROnPlateau reducing learning rate to 1.4475661166493731e-27.
Epoch 824/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 825/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 826/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 827/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 828/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 829/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 830/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 831/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 832/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 833/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00833: ReduceLROnPlateau reducing learning rate to 7.237830583246866e-28.
Epoch 834/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 835/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 836/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 837/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 838/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 839/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 840/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 841/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 842/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 843/1000
139/139 [==============================] - 0s 79us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00843: ReduceLROnPlateau reducing learning rate to 3.618915291623433e-28.
Epoch 844/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 845/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 846/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 847/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 848/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 849/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 850/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 851/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 852/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 853/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00853: ReduceLROnPlateau reducing learning rate to 1.8094576458117164e-28.
Epoch 854/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 855/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 856/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 857/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 858/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 859/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 860/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 861/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 862/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 863/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00863: ReduceLROnPlateau reducing learning rate to 9.047288229058582e-29.
Epoch 864/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 865/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 866/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 867/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 868/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 869/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 870/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 871/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 872/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 873/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00873: ReduceLROnPlateau reducing learning rate to 4.523644114529291e-29.
Epoch 874/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 875/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 876/1000
139/139 [==============================] - 0s 165us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 877/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 878/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 879/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 880/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 881/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 882/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 883/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00883: ReduceLROnPlateau reducing learning rate to 2.2618220572646455e-29.
Epoch 884/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 885/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 886/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 887/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 888/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 889/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 890/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 891/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 892/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 893/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00893: ReduceLROnPlateau reducing learning rate to 1.1309110286323228e-29.
Epoch 894/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 895/1000
139/139 [==============================] - ETA: 0s - loss: 0.5519 - accuracy: 0.65 - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 896/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 897/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 898/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 899/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 900/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 901/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 902/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 903/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00903: ReduceLROnPlateau reducing learning rate to 5.654555143161614e-30.
Epoch 904/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 905/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 906/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 907/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 908/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 909/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 910/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 911/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 912/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 913/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00913: ReduceLROnPlateau reducing learning rate to 2.827277571580807e-30.
Epoch 914/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 915/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 916/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 917/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 918/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 919/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 920/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 921/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 922/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 923/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00923: ReduceLROnPlateau reducing learning rate to 1.4136387857904035e-30.
Epoch 924/1000
139/139 [==============================] - 0s 151us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 925/1000
139/139 [==============================] - 0s 151us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 926/1000
139/139 [==============================] - 0s 144us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 927/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 928/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 929/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 930/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 931/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 932/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 933/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00933: ReduceLROnPlateau reducing learning rate to 7.068193928952017e-31.
Epoch 934/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 935/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 936/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 937/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 938/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 939/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 940/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 941/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 942/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 943/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00943: ReduceLROnPlateau reducing learning rate to 3.5340969644760086e-31.
Epoch 944/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 945/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 946/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 947/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 948/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 949/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 950/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 951/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 952/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 953/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00953: ReduceLROnPlateau reducing learning rate to 1.7670484822380043e-31.
Epoch 954/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 955/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 956/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 957/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 958/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 959/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 960/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 961/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 962/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 963/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00963: ReduceLROnPlateau reducing learning rate to 8.835242411190022e-32.
Epoch 964/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 965/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 966/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 967/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 968/1000
139/139 [==============================] - 0s 129us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 969/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 970/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 971/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 972/1000
139/139 [==============================] - 0s 137us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 973/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00973: ReduceLROnPlateau reducing learning rate to 4.417621205595011e-32.
Epoch 974/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 975/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 976/1000
139/139 [==============================] - 0s 86us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 977/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 978/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 979/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 980/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 981/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 982/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 983/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00983: ReduceLROnPlateau reducing learning rate to 2.2088106027975054e-32.
Epoch 984/1000
139/139 [==============================] - 0s 165us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 985/1000
139/139 [==============================] - 0s 151us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 986/1000
139/139 [==============================] - 0s 180us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 987/1000
139/139 [==============================] - 0s 151us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 988/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 989/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 990/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 991/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 992/1000
139/139 [==============================] - 0s 115us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 993/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383

Epoch 00993: ReduceLROnPlateau reducing learning rate to 1.1044053013987527e-32.
Epoch 994/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 995/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 996/1000
139/139 [==============================] - 0s 108us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 997/1000
139/139 [==============================] - 0s 122us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 998/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 999/1000
139/139 [==============================] - 0s 101us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
Epoch 1000/1000
139/139 [==============================] - 0s 93us/step - loss: 0.5600 - accuracy: 0.6906 - val_loss: 0.6272 - val_accuracy: 0.6383
In [366]:
# Plot the Keras training history: accuracy and loss curves, training vs. validation.
# (Removed a leftover debug `print(epochs)` and added axis labels so the figures stand alone.)
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))  # one point per training epoch

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 1000)
In [367]:
# Evaluate the trained model on the held-out test set.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
47/47 [==============================] - 0s 64us/step
test loss: 0.6271742037002076, test accuracy: 0.6382978558540344
In [368]:
# Score the test set; ROC AUC is computed on the raw predicted probabilities.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.6725490196078431
In [369]:
# Binarize the predicted probabilities at the 0.5 threshold, then report Cohen's kappa.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  0.0

KMeans

In [302]:
X
Out[302]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6
0 1.347152 0.215026 0.572244 1.090273 0.291013 0.262601
1 0.820325 -1.142202 -0.584840 -1.206404 0.498688 -0.437268
2 -0.368979 -1.101005 1.316826 1.007755 -0.300511 -1.688750
3 0.137511 -1.462891 1.217279 0.145184 -0.233709 -0.483200
4 0.067094 -1.364744 1.323658 -0.374292 -0.763515 -1.012955
5 -0.308205 1.554912 0.177715 -0.789365 0.203522 -0.138956
6 -0.229900 1.130218 -0.432863 -0.017215 -0.028738 0.374753
7 0.128919 -2.154466 -1.473142 -0.706391 0.582734 -1.413063
8 -0.428809 1.078779 0.122401 0.073561 1.095684 0.487808
9 0.508739 -1.275256 0.146667 0.082460 0.735466 -0.812000
10 0.236456 -1.307369 -0.087549 -1.929049 -0.896450 -0.085837
11 -0.332097 -2.136115 -0.379978 0.106336 0.131067 -0.494191
12 0.878776 -0.818718 -0.318688 -0.206650 -0.135265 -1.505040
13 -0.503536 -1.372498 -0.663743 -0.059237 0.168824 0.591813
14 -0.225399 -2.333299 -0.977473 -0.503168 0.734560 -0.229386
15 1.637443 0.565792 1.742275 -0.855040 0.086757 -1.781283
16 1.593976 -0.388711 -1.659037 0.097761 0.429256 2.040221
17 1.453127 -0.044197 0.718333 0.786724 -0.814130 -0.463517
18 0.574197 -0.311820 -1.315952 0.601889 -0.114239 1.343490
19 1.285525 0.753218 -0.678607 1.972188 0.383330 1.855540
20 -0.420448 -0.248495 0.126628 -0.407281 0.328979 0.844612
21 0.782508 -0.432480 -1.645501 -0.097137 -0.510175 0.780762
22 1.220720 0.555439 1.643752 1.016671 0.036438 -0.204417
23 1.276232 -0.970408 -2.218229 -1.985819 0.181550 0.875360
24 0.330249 0.500340 0.945837 0.170144 -1.951811 0.728661
25 -0.457742 1.246710 1.511768 -0.330001 -0.839662 0.686756
26 -0.128012 0.619401 1.421912 -0.281963 -2.498478 0.602396
27 1.567570 -0.230001 1.237739 0.280013 0.089273 -0.372351
28 -0.343287 -1.213337 1.470523 -0.507041 -1.296801 -0.318921
29 -1.776654 -1.197082 0.552844 -0.390964 -0.822647 -0.655686
... ... ... ... ... ... ...
156 1.136603 1.061921 -1.369527 0.349165 -0.306267 1.859464
157 -0.957141 0.412565 -0.099705 0.507263 0.380704 -1.450660
158 0.627291 -0.982430 0.247369 -0.335773 -0.880954 1.943513
159 0.605582 -0.713493 -0.033525 0.066891 -0.330645 0.762021
160 0.435082 -1.765650 -0.206944 -2.176473 -1.106134 1.293098
161 -0.869679 -0.284973 -0.185255 1.340517 -1.710248 -1.319612
162 1.695010 0.478353 -2.356288 0.126103 -0.667751 -0.738760
163 1.201750 -0.589959 -0.583958 1.265372 2.093314 0.102967
164 0.602223 1.408737 0.093544 -1.913840 0.673872 -1.282054
165 1.273699 0.835188 0.238069 -0.918903 0.953651 0.759154
166 0.910091 0.783406 -0.282440 -0.757994 -0.153400 0.541408
167 0.784319 -0.628463 0.531487 -0.758651 -0.436559 0.237206
168 0.597174 -0.260556 0.776122 0.565709 -0.698971 0.850620
169 0.512245 -0.906993 0.867897 0.635716 -0.539661 0.041101
170 0.766694 0.045665 1.652744 -0.681039 -2.002121 -0.208060
171 0.729920 -0.012315 1.424850 -1.133758 -1.844432 -0.865224
172 -0.250595 1.333637 0.735398 -2.185661 -0.923218 -0.293878
173 0.239742 1.083681 -0.966488 -0.543890 -0.015042 0.298120
174 0.318041 0.919114 -0.293494 -1.777415 -0.452843 0.237000
175 -0.249524 -1.310616 -1.629695 0.599616 1.389241 0.190145
176 -0.239498 -0.884659 -1.273002 0.946824 0.600415 -0.458834
177 -0.247995 -0.379289 0.038455 -0.181334 0.555713 -0.343901
178 -1.538412 0.875654 -1.387902 -0.180395 0.008275 -0.438000
179 -0.858788 -0.062931 -0.178983 -0.593047 1.845652 -1.559059
180 0.044348 0.420883 -0.678748 1.238163 1.329929 -0.157668
181 1.376463 -1.287646 0.773584 1.157342 -0.286084 -1.054513
182 0.948949 -1.179621 1.457273 1.080025 1.950553 0.140886
183 1.235651 -0.462711 0.631757 -1.169777 -0.693590 0.281712
184 0.668597 1.124688 -1.524719 0.905841 -1.384876 0.539531
185 -0.966564 0.725873 0.168576 -0.106983 -0.071778 -0.257675

186 rows × 6 columns

In [303]:
# Elbow method: within-cluster sum of squares (KMeans inertia) for k = 1..14.
# `WSSs` is reused by the plotting cell below, so the name is kept.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[303]:
[1116.0000000000002,
 927.5012922756229,
 805.362167602635,
 709.3398851775523,
 644.8353925193064,
 590.0040583169643,
 547.564638663471,
 517.2051050812995,
 492.673077864197,
 467.91458811999206,
 449.82839909018185,
 426.939832241735,
 408.8927579760986,
 392.386338225884]
In [304]:
# Elbow plot: look for the "knee" to choose the number of clusters (k = 4 was picked below).
# Added title/axis labels and plt.show() to suppress the bare Line2D repr.
plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow method for KMeans')
plt.xlabel('Number of clusters k')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.show()
Out[304]:
[<matplotlib.lines.Line2D at 0x1e8302657f0>]

K=4

In [305]:
# Final clustering model with k = 4, chosen from the elbow plot above.
# `kmeans_tc` is reused by the cells below (labels_, predict), so the name is kept.
kmeans_tc = KMeans(n_init=10, n_clusters=4, random_state=0)
kmeans_tc.fit(X)
Out[305]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=4, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [306]:
kmeans_tc.labels_
Out[306]:
array([1, 2, 2, 2, 2, 3, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1,
       2, 1, 0, 0, 0, 2, 2, 0, 0, 0, 0, 2, 1, 1, 3, 0, 0, 2, 2, 2, 1, 1,
       1, 0, 3, 3, 3, 0, 3, 3, 1, 1, 1, 0, 3, 0, 1, 1, 1, 1, 2, 2, 1, 0,
       2, 0, 0, 3, 3, 0, 0, 1, 1, 0, 3, 3, 1, 2, 2, 2, 2, 2, 1, 2, 3, 3,
       0, 0, 0, 2, 1, 1, 3, 3, 3, 0, 2, 0, 3, 3, 3, 1, 2, 0, 3, 0, 1, 3,
       2, 0, 0, 3, 0, 0, 0, 0, 2, 0, 2, 2, 3, 2, 2, 1, 2, 3, 2, 1, 2, 1,
       1, 1, 3, 3, 1, 3, 1, 0, 3, 1, 3, 2, 3, 3, 3, 3, 0, 0, 0, 3, 0, 3,
       1, 2, 1, 3, 1, 1, 2, 0, 1, 1, 3, 1, 1, 2, 2, 2, 2, 2, 3, 1, 1, 3,
       3, 3, 3, 3, 3, 2, 2, 2, 1, 3])
In [307]:
# Predict cluster ids for X; since X is the data the model was fitted on,
# this matches kmeans_tc.labels_ exactly.
clusters_tc = kmeans_tc.predict(X)
clusters_tc
Out[307]:
array([1, 2, 2, 2, 2, 3, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1,
       2, 1, 0, 0, 0, 2, 2, 0, 0, 0, 0, 2, 1, 1, 3, 0, 0, 2, 2, 2, 1, 1,
       1, 0, 3, 3, 3, 0, 3, 3, 1, 1, 1, 0, 3, 0, 1, 1, 1, 1, 2, 2, 1, 0,
       2, 0, 0, 3, 3, 0, 0, 1, 1, 0, 3, 3, 1, 2, 2, 2, 2, 2, 1, 2, 3, 3,
       0, 0, 0, 2, 1, 1, 3, 3, 3, 0, 2, 0, 3, 3, 3, 1, 2, 0, 3, 0, 1, 3,
       2, 0, 0, 3, 0, 0, 0, 0, 2, 0, 2, 2, 3, 2, 2, 1, 2, 3, 2, 1, 2, 1,
       1, 1, 3, 3, 1, 3, 1, 0, 3, 1, 3, 2, 3, 3, 3, 3, 0, 0, 0, 3, 0, 3,
       1, 2, 1, 3, 1, 1, 2, 0, 1, 1, 3, 1, 1, 2, 2, 2, 2, 2, 3, 1, 1, 3,
       3, 3, 3, 3, 3, 2, 2, 2, 1, 3])
In [308]:
# Attach the cluster id and the target label to the feature frame for comparison.
# NOTE(review): this mutates X in place, so earlier displays of X become stale;
# converting y to a plain list discards its index before assignment — presumably
# intentional to avoid index alignment, verify against how X/y were built.
X.loc[:, 'Cluster'] = clusters_tc
X.loc[:, 'chosen'] = y.tolist()
In [309]:
X
Out[309]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6 Cluster chosen
0 1.347152 0.215026 0.572244 1.090273 0.291013 0.262601 1 0
1 0.820325 -1.142202 -0.584840 -1.206404 0.498688 -0.437268 2 0
2 -0.368979 -1.101005 1.316826 1.007755 -0.300511 -1.688750 2 0
3 0.137511 -1.462891 1.217279 0.145184 -0.233709 -0.483200 2 0
4 0.067094 -1.364744 1.323658 -0.374292 -0.763515 -1.012955 2 0
5 -0.308205 1.554912 0.177715 -0.789365 0.203522 -0.138956 3 0
6 -0.229900 1.130218 -0.432863 -0.017215 -0.028738 0.374753 1 0
7 0.128919 -2.154466 -1.473142 -0.706391 0.582734 -1.413063 2 0
8 -0.428809 1.078779 0.122401 0.073561 1.095684 0.487808 3 0
9 0.508739 -1.275256 0.146667 0.082460 0.735466 -0.812000 2 0
10 0.236456 -1.307369 -0.087549 -1.929049 -0.896450 -0.085837 2 0
11 -0.332097 -2.136115 -0.379978 0.106336 0.131067 -0.494191 2 0
12 0.878776 -0.818718 -0.318688 -0.206650 -0.135265 -1.505040 2 0
13 -0.503536 -1.372498 -0.663743 -0.059237 0.168824 0.591813 1 0
14 -0.225399 -2.333299 -0.977473 -0.503168 0.734560 -0.229386 2 0
15 1.637443 0.565792 1.742275 -0.855040 0.086757 -1.781283 2 0
16 1.593976 -0.388711 -1.659037 0.097761 0.429256 2.040221 1 0
17 1.453127 -0.044197 0.718333 0.786724 -0.814130 -0.463517 2 0
18 0.574197 -0.311820 -1.315952 0.601889 -0.114239 1.343490 1 0
19 1.285525 0.753218 -0.678607 1.972188 0.383330 1.855540 1 0
20 -0.420448 -0.248495 0.126628 -0.407281 0.328979 0.844612 1 0
21 0.782508 -0.432480 -1.645501 -0.097137 -0.510175 0.780762 1 0
22 1.220720 0.555439 1.643752 1.016671 0.036438 -0.204417 2 0
23 1.276232 -0.970408 -2.218229 -1.985819 0.181550 0.875360 1 0
24 0.330249 0.500340 0.945837 0.170144 -1.951811 0.728661 0 0
25 -0.457742 1.246710 1.511768 -0.330001 -0.839662 0.686756 0 0
26 -0.128012 0.619401 1.421912 -0.281963 -2.498478 0.602396 0 0
27 1.567570 -0.230001 1.237739 0.280013 0.089273 -0.372351 2 0
28 -0.343287 -1.213337 1.470523 -0.507041 -1.296801 -0.318921 2 0
29 -1.776654 -1.197082 0.552844 -0.390964 -0.822647 -0.655686 0 0
... ... ... ... ... ... ... ... ...
156 1.136603 1.061921 -1.369527 0.349165 -0.306267 1.859464 1 1
157 -0.957141 0.412565 -0.099705 0.507263 0.380704 -1.450660 3 1
158 0.627291 -0.982430 0.247369 -0.335773 -0.880954 1.943513 1 1
159 0.605582 -0.713493 -0.033525 0.066891 -0.330645 0.762021 1 1
160 0.435082 -1.765650 -0.206944 -2.176473 -1.106134 1.293098 2 1
161 -0.869679 -0.284973 -0.185255 1.340517 -1.710248 -1.319612 0 1
162 1.695010 0.478353 -2.356288 0.126103 -0.667751 -0.738760 1 1
163 1.201750 -0.589959 -0.583958 1.265372 2.093314 0.102967 1 1
164 0.602223 1.408737 0.093544 -1.913840 0.673872 -1.282054 3 1
165 1.273699 0.835188 0.238069 -0.918903 0.953651 0.759154 1 1
166 0.910091 0.783406 -0.282440 -0.757994 -0.153400 0.541408 1 1
167 0.784319 -0.628463 0.531487 -0.758651 -0.436559 0.237206 2 1
168 0.597174 -0.260556 0.776122 0.565709 -0.698971 0.850620 2 1
169 0.512245 -0.906993 0.867897 0.635716 -0.539661 0.041101 2 1
170 0.766694 0.045665 1.652744 -0.681039 -2.002121 -0.208060 2 1
171 0.729920 -0.012315 1.424850 -1.133758 -1.844432 -0.865224 2 1
172 -0.250595 1.333637 0.735398 -2.185661 -0.923218 -0.293878 3 1
173 0.239742 1.083681 -0.966488 -0.543890 -0.015042 0.298120 1 1
174 0.318041 0.919114 -0.293494 -1.777415 -0.452843 0.237000 1 1
175 -0.249524 -1.310616 -1.629695 0.599616 1.389241 0.190145 3 1
176 -0.239498 -0.884659 -1.273002 0.946824 0.600415 -0.458834 3 1
177 -0.247995 -0.379289 0.038455 -0.181334 0.555713 -0.343901 3 1
178 -1.538412 0.875654 -1.387902 -0.180395 0.008275 -0.438000 3 1
179 -0.858788 -0.062931 -0.178983 -0.593047 1.845652 -1.559059 3 1
180 0.044348 0.420883 -0.678748 1.238163 1.329929 -0.157668 3 1
181 1.376463 -1.287646 0.773584 1.157342 -0.286084 -1.054513 2 1
182 0.948949 -1.179621 1.457273 1.080025 1.950553 0.140886 2 1
183 1.235651 -0.462711 0.631757 -1.169777 -0.693590 0.281712 2 1
184 0.668597 1.124688 -1.524719 0.905841 -1.384876 0.539531 1 1
185 -0.966564 0.725873 0.168576 -0.106983 -0.071778 -0.257675 3 1

186 rows × 8 columns

In [310]:
# Count rows per (chosen, Cluster) pair, pivot so clusters are rows and the
# chosen label (0/1) gives the columns, then draw a stacked bar chart of the
# cluster composition. NOTE(review): the variable name `stacked` collides
# confusingly with the `stacked=True` keyword below — consider renaming.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[310]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e8302a0dd8>
In [370]:
from IPython.display import display, Markdown, Latex
# Render the current company's name ("Specialized", per the output below) as a
# level-2 markdown heading; `companies` is defined in an earlier cell.
display(Markdown('## '+companies[4]))

Specialized

ANN

In [371]:
# Feature matrix for company index 4 (standardized features; presumably the
# "_std_tc" suffix means standardized/transformed — confirm with earlier cells).
X = df_n_ps_std_tc[4]
In [372]:
# Binary target: whether the track was chosen for the playlist.
y = df_n_ps[4]['chosen']
In [373]:
# Default 75/25 split. NOTE(review): no random_state is passed here, so the
# split (and every downstream result) changes on each re-run — consider fixing.
X_train, X_test, y_train, y_test = train_test_split(X, y)
In [374]:
# Sanity check: (164 samples, 6 features) per the output below.
X_train.shape
Out[374]:
(164, 6)
In [315]:
# Base estimator for the grid search; hidden_layer_sizes here is only a
# placeholder — the grid below overrides it.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [316]:
# Candidate hyperparameter values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
# Iteration budgets: a few small values, then 100..500, then two large caps.
max_iter_vec = [10, 20, 50, 75] + [100 * k for k in (1, 2, 3, 4, 5, 10, 20)]
# Topologies: single layer, two equal layers, one tapered pair, three equal
# layers, and one tapered triple.
hidden_layer_sizes_vec = (
    [(n,) for n in (10, 20, 30)]
    + [(n, n) for n in (10, 20, 30)]
    + [(20, 10)]
    + [(n, n, n) for n in (10, 20, 30)]
    + [(30, 20, 10)]
)
# Initial learning rates: 0.001 .. 0.01 in 0.001 steps, plus 0.02.
learning_rate_init_vec = [k / 1000 for k in range(1, 11)] + [0.02]
# Mini-batch sizes (declared but commented out of the grid below).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [317]:
import time
start = time.time()  # wall-clock reference point for the total-runtime report below

np.random.seed(1234)  # fix the global RNG so MLP init / CV shuffling are reproducible
# Hyperparameter grid; batch_size is deliberately left out to keep the search tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Score with both Cohen's kappa and accuracy; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# FIX: the `iid` parameter was deprecated in scikit-learn 0.22 and removed in
# 0.24 — passing iid=True raises TypeError on modern versions, so it is dropped.
# (The numeric effect is negligible here: folds are near-equal in size.)
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1)
In [318]:
# Run the full grid search (5-fold CV over every parameter combination).
grid.fit(X_train, y_train)

# Report the winning configuration with its accuracy and kappa.
# (The message text is in Spanish: "The best model's parameters were {0},
# giving an Accuracy of {1}% and a Kappa of {2}".)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # time right after the model search finishes
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30, 20, 10), 'learning_rate_init': 0.006, 'max_iter': 400}, que permiten obtener un Accuracy de 69.51% y un Kappa del 38.98
Tiempo total: 23.88 minutos
In [375]:
# Network geometry for the Keras re-implementation of the grid-search winner.
n0 = X_train.shape[1]  # input dimensionality (6 features)
# Pin the winning topology explicitly so this cell does not depend on the
# (long-running) grid search having just been executed.
grid.best_params_['hidden_layer_sizes'] = [30, 20, 10]
### hidden_layer_sizes
# Hidden layer widths followed by a single output unit for binary classification.
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]
lr = 0.006     # learning_rate_init from the grid-search winner
epochs = 400   # max_iter from the grid-search winner
In [376]:
# Functional-API input placeholder: one vector of n0 (=6) features per sample.
input_tensor = Input(shape = (n0,))
In [377]:
# Build the hidden stack layer by layer, keeping every intermediate tensor in
# hidden_outputs (index 0 is the input itself; index i+1 is layer i's output).
hidden_outputs = [input_tensor]
for i in range (len(ns)-1):
    # ns[:-1] are the hidden widths (30, 20, 10); tanh matches the manual setup.
    hidden_outputs.append(Dense(ns[i], activation = 'tanh')(hidden_outputs[i]))
    
# Final 1-unit sigmoid layer for binary classification (ns[-1] == 1).
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [378]:
# Assemble the functional model and snapshot its freshly-initialized weights,
# so training can later be restarted from the same initial state.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [379]:
# Architecture sanity check: 6 -> 30 -> 20 -> 10 -> 1, 1,051 params (see below).
model.summary()
Model: "model_19"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_19 (InputLayer)        (None, 6)                 0         
_________________________________________________________________
dense_59 (Dense)             (None, 30)                210       
_________________________________________________________________
dense_60 (Dense)             (None, 20)                620       
_________________________________________________________________
dense_61 (Dense)             (None, 10)                210       
_________________________________________________________________
dense_62 (Dense)             (None, 1)                 11        
=================================================================
Total params: 1,051
Trainable params: 1,051
Non-trainable params: 0
_________________________________________________________________
In [380]:
# Reset to the saved initial weights so this cell is re-runnable from the same start.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy Keras argument name; it was renamed to
# `learning_rate` in newer TF/Keras and removed in Keras 3 — keep `lr` only if
# staying on the old standalone Keras this notebook was run with.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train for up to `epochs`, halving the learning rate whenever validation
# accuracy fails to improve by 0.01 over 10 epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test),
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 164 samples, validate on 55 samples
Epoch 1/400
164/164 [==============================] - 0s 1ms/step - loss: 0.7088 - accuracy: 0.5183 - val_loss: 0.7087 - val_accuracy: 0.5455
Epoch 2/400
164/164 [==============================] - 0s 67us/step - loss: 0.6622 - accuracy: 0.6037 - val_loss: 0.6899 - val_accuracy: 0.5273
Epoch 3/400
164/164 [==============================] - 0s 79us/step - loss: 0.6587 - accuracy: 0.5793 - val_loss: 0.6864 - val_accuracy: 0.5455
Epoch 4/400
164/164 [==============================] - 0s 73us/step - loss: 0.6552 - accuracy: 0.5671 - val_loss: 0.6980 - val_accuracy: 0.4909
Epoch 5/400
164/164 [==============================] - 0s 67us/step - loss: 0.6541 - accuracy: 0.5976 - val_loss: 0.7080 - val_accuracy: 0.5273
Epoch 6/400
164/164 [==============================] - 0s 67us/step - loss: 0.6436 - accuracy: 0.6280 - val_loss: 0.7127 - val_accuracy: 0.5091
Epoch 7/400
164/164 [==============================] - 0s 73us/step - loss: 0.6387 - accuracy: 0.6159 - val_loss: 0.7140 - val_accuracy: 0.5818
Epoch 8/400
164/164 [==============================] - 0s 67us/step - loss: 0.6370 - accuracy: 0.6402 - val_loss: 0.7113 - val_accuracy: 0.5273
Epoch 9/400
164/164 [==============================] - 0s 73us/step - loss: 0.6318 - accuracy: 0.6463 - val_loss: 0.6976 - val_accuracy: 0.4727
Epoch 10/400
164/164 [==============================] - 0s 73us/step - loss: 0.6245 - accuracy: 0.6402 - val_loss: 0.6985 - val_accuracy: 0.4727
Epoch 11/400
164/164 [==============================] - 0s 67us/step - loss: 0.6202 - accuracy: 0.6402 - val_loss: 0.7012 - val_accuracy: 0.5455
Epoch 12/400
164/164 [==============================] - 0s 73us/step - loss: 0.6181 - accuracy: 0.6402 - val_loss: 0.7132 - val_accuracy: 0.5455
Epoch 13/400
164/164 [==============================] - 0s 73us/step - loss: 0.6138 - accuracy: 0.6220 - val_loss: 0.7257 - val_accuracy: 0.5455
Epoch 14/400
164/164 [==============================] - 0s 79us/step - loss: 0.6097 - accuracy: 0.6341 - val_loss: 0.7276 - val_accuracy: 0.5455
Epoch 15/400
164/164 [==============================] - 0s 79us/step - loss: 0.6004 - accuracy: 0.6463 - val_loss: 0.7047 - val_accuracy: 0.5455
Epoch 16/400
164/164 [==============================] - 0s 67us/step - loss: 0.5904 - accuracy: 0.6585 - val_loss: 0.6892 - val_accuracy: 0.5455
Epoch 17/400
164/164 [==============================] - 0s 79us/step - loss: 0.5855 - accuracy: 0.7012 - val_loss: 0.6885 - val_accuracy: 0.5273

Epoch 00017: ReduceLROnPlateau reducing learning rate to 0.003000000026077032.
Epoch 18/400
164/164 [==============================] - 0s 104us/step - loss: 0.5810 - accuracy: 0.7073 - val_loss: 0.6960 - val_accuracy: 0.5455
Epoch 19/400
164/164 [==============================] - ETA: 0s - loss: 0.5621 - accuracy: 0.68 - 0s 91us/step - loss: 0.5759 - accuracy: 0.6951 - val_loss: 0.6962 - val_accuracy: 0.5636
Epoch 20/400
164/164 [==============================] - 0s 91us/step - loss: 0.5696 - accuracy: 0.7134 - val_loss: 0.6948 - val_accuracy: 0.5455
Epoch 21/400
164/164 [==============================] - 0s 73us/step - loss: 0.5661 - accuracy: 0.7073 - val_loss: 0.6971 - val_accuracy: 0.5455
Epoch 22/400
164/164 [==============================] - 0s 73us/step - loss: 0.5649 - accuracy: 0.6707 - val_loss: 0.7056 - val_accuracy: 0.5273
Epoch 23/400
164/164 [==============================] - 0s 67us/step - loss: 0.5603 - accuracy: 0.6768 - val_loss: 0.6949 - val_accuracy: 0.4909
Epoch 24/400
164/164 [==============================] - 0s 73us/step - loss: 0.5566 - accuracy: 0.7134 - val_loss: 0.6793 - val_accuracy: 0.5455
Epoch 25/400
164/164 [==============================] - 0s 85us/step - loss: 0.5509 - accuracy: 0.7317 - val_loss: 0.6861 - val_accuracy: 0.5455
Epoch 26/400
164/164 [==============================] - 0s 97us/step - loss: 0.5448 - accuracy: 0.7073 - val_loss: 0.7017 - val_accuracy: 0.5455
Epoch 27/400
164/164 [==============================] - 0s 73us/step - loss: 0.5386 - accuracy: 0.7378 - val_loss: 0.7016 - val_accuracy: 0.5818

Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.001500000013038516.
Epoch 28/400
164/164 [==============================] - 0s 79us/step - loss: 0.5355 - accuracy: 0.7378 - val_loss: 0.7024 - val_accuracy: 0.5818
Epoch 29/400
164/164 [==============================] - 0s 79us/step - loss: 0.5344 - accuracy: 0.7439 - val_loss: 0.7006 - val_accuracy: 0.5818
Epoch 30/400
164/164 [==============================] - 0s 73us/step - loss: 0.5321 - accuracy: 0.7317 - val_loss: 0.6973 - val_accuracy: 0.5818
Epoch 31/400
164/164 [==============================] - 0s 73us/step - loss: 0.5294 - accuracy: 0.7500 - val_loss: 0.6940 - val_accuracy: 0.6000
Epoch 32/400
164/164 [==============================] - 0s 73us/step - loss: 0.5256 - accuracy: 0.7439 - val_loss: 0.6926 - val_accuracy: 0.5636
Epoch 33/400
164/164 [==============================] - 0s 73us/step - loss: 0.5236 - accuracy: 0.7622 - val_loss: 0.6955 - val_accuracy: 0.5636
Epoch 34/400
164/164 [==============================] - 0s 79us/step - loss: 0.5239 - accuracy: 0.7561 - val_loss: 0.6986 - val_accuracy: 0.5455
Epoch 35/400
164/164 [==============================] - 0s 73us/step - loss: 0.5229 - accuracy: 0.7683 - val_loss: 0.6932 - val_accuracy: 0.5091
Epoch 36/400
164/164 [==============================] - 0s 73us/step - loss: 0.5181 - accuracy: 0.7805 - val_loss: 0.6923 - val_accuracy: 0.5636
Epoch 37/400
164/164 [==============================] - 0s 73us/step - loss: 0.5138 - accuracy: 0.7805 - val_loss: 0.6969 - val_accuracy: 0.5818
Epoch 38/400
164/164 [==============================] - 0s 73us/step - loss: 0.5126 - accuracy: 0.7866 - val_loss: 0.6948 - val_accuracy: 0.5455
Epoch 39/400
164/164 [==============================] - 0s 73us/step - loss: 0.5097 - accuracy: 0.7805 - val_loss: 0.6908 - val_accuracy: 0.5636
Epoch 40/400
164/164 [==============================] - 0s 73us/step - loss: 0.5074 - accuracy: 0.7805 - val_loss: 0.6882 - val_accuracy: 0.5818
Epoch 41/400
164/164 [==============================] - 0s 73us/step - loss: 0.5051 - accuracy: 0.7805 - val_loss: 0.6906 - val_accuracy: 0.5818

Epoch 00041: ReduceLROnPlateau reducing learning rate to 0.000750000006519258.
Epoch 42/400
164/164 [==============================] - 0s 73us/step - loss: 0.5034 - accuracy: 0.7744 - val_loss: 0.6922 - val_accuracy: 0.5818
Epoch 43/400
164/164 [==============================] - 0s 79us/step - loss: 0.5023 - accuracy: 0.7744 - val_loss: 0.6940 - val_accuracy: 0.5818
Epoch 44/400
164/164 [==============================] - 0s 73us/step - loss: 0.5019 - accuracy: 0.7744 - val_loss: 0.6961 - val_accuracy: 0.5636
Epoch 45/400
164/164 [==============================] - 0s 73us/step - loss: 0.5010 - accuracy: 0.7622 - val_loss: 0.6992 - val_accuracy: 0.5636
Epoch 46/400
164/164 [==============================] - 0s 73us/step - loss: 0.4997 - accuracy: 0.7683 - val_loss: 0.6997 - val_accuracy: 0.5636
Epoch 47/400
164/164 [==============================] - 0s 79us/step - loss: 0.4982 - accuracy: 0.7683 - val_loss: 0.6998 - val_accuracy: 0.5636
Epoch 48/400
164/164 [==============================] - 0s 110us/step - loss: 0.4969 - accuracy: 0.7683 - val_loss: 0.7021 - val_accuracy: 0.5636
Epoch 49/400
164/164 [==============================] - 0s 104us/step - loss: 0.4955 - accuracy: 0.7683 - val_loss: 0.7023 - val_accuracy: 0.5818
Epoch 50/400
164/164 [==============================] - 0s 91us/step - loss: 0.4943 - accuracy: 0.7683 - val_loss: 0.7007 - val_accuracy: 0.5818
Epoch 51/400
164/164 [==============================] - 0s 91us/step - loss: 0.4931 - accuracy: 0.7683 - val_loss: 0.6979 - val_accuracy: 0.5818

Epoch 00051: ReduceLROnPlateau reducing learning rate to 0.000375000003259629.
Epoch 52/400
164/164 [==============================] - 0s 73us/step - loss: 0.4913 - accuracy: 0.7805 - val_loss: 0.6963 - val_accuracy: 0.5636
Epoch 53/400
164/164 [==============================] - 0s 79us/step - loss: 0.4910 - accuracy: 0.7805 - val_loss: 0.6947 - val_accuracy: 0.5818
Epoch 54/400
164/164 [==============================] - 0s 79us/step - loss: 0.4902 - accuracy: 0.7866 - val_loss: 0.6944 - val_accuracy: 0.5818
Epoch 55/400
164/164 [==============================] - 0s 73us/step - loss: 0.4896 - accuracy: 0.7866 - val_loss: 0.6946 - val_accuracy: 0.5636
Epoch 56/400
164/164 [==============================] - 0s 79us/step - loss: 0.4889 - accuracy: 0.7866 - val_loss: 0.6945 - val_accuracy: 0.5636
Epoch 57/400
164/164 [==============================] - 0s 85us/step - loss: 0.4881 - accuracy: 0.7927 - val_loss: 0.6952 - val_accuracy: 0.5636
Epoch 58/400
164/164 [==============================] - 0s 73us/step - loss: 0.4879 - accuracy: 0.7927 - val_loss: 0.6949 - val_accuracy: 0.5636
Epoch 59/400
164/164 [==============================] - 0s 79us/step - loss: 0.4871 - accuracy: 0.7927 - val_loss: 0.6967 - val_accuracy: 0.5636
Epoch 60/400
164/164 [==============================] - 0s 98us/step - loss: 0.4867 - accuracy: 0.7988 - val_loss: 0.6976 - val_accuracy: 0.5636
Epoch 61/400
164/164 [==============================] - 0s 85us/step - loss: 0.4860 - accuracy: 0.7988 - val_loss: 0.6975 - val_accuracy: 0.5636

Epoch 00061: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145.
Epoch 62/400
164/164 [==============================] - 0s 79us/step - loss: 0.4855 - accuracy: 0.7988 - val_loss: 0.6974 - val_accuracy: 0.5636
Epoch 63/400
164/164 [==============================] - 0s 104us/step - loss: 0.4852 - accuracy: 0.8049 - val_loss: 0.6974 - val_accuracy: 0.5636
Epoch 64/400
164/164 [==============================] - 0s 79us/step - loss: 0.4849 - accuracy: 0.8049 - val_loss: 0.6973 - val_accuracy: 0.5636
Epoch 65/400
164/164 [==============================] - 0s 79us/step - loss: 0.4844 - accuracy: 0.7927 - val_loss: 0.6977 - val_accuracy: 0.5636
Epoch 66/400
164/164 [==============================] - 0s 79us/step - loss: 0.4840 - accuracy: 0.7927 - val_loss: 0.6983 - val_accuracy: 0.5636
Epoch 67/400
164/164 [==============================] - 0s 67us/step - loss: 0.4837 - accuracy: 0.7927 - val_loss: 0.6988 - val_accuracy: 0.5636
Epoch 68/400
164/164 [==============================] - 0s 67us/step - loss: 0.4835 - accuracy: 0.7988 - val_loss: 0.6992 - val_accuracy: 0.5455
Epoch 69/400
164/164 [==============================] - 0s 73us/step - loss: 0.4830 - accuracy: 0.7988 - val_loss: 0.6988 - val_accuracy: 0.5455
Epoch 70/400
164/164 [==============================] - 0s 67us/step - loss: 0.4829 - accuracy: 0.7988 - val_loss: 0.6984 - val_accuracy: 0.5455
Epoch 71/400
164/164 [==============================] - 0s 67us/step - loss: 0.4828 - accuracy: 0.7927 - val_loss: 0.6973 - val_accuracy: 0.5455

Epoch 00071: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05.
Epoch 72/400
164/164 [==============================] - 0s 73us/step - loss: 0.4824 - accuracy: 0.7988 - val_loss: 0.6970 - val_accuracy: 0.5455
Epoch 73/400
164/164 [==============================] - 0s 128us/step - loss: 0.4822 - accuracy: 0.8049 - val_loss: 0.6972 - val_accuracy: 0.5455
Epoch 74/400
164/164 [==============================] - 0s 79us/step - loss: 0.4820 - accuracy: 0.8110 - val_loss: 0.6971 - val_accuracy: 0.5455
Epoch 75/400
164/164 [==============================] - 0s 79us/step - loss: 0.4820 - accuracy: 0.8110 - val_loss: 0.6972 - val_accuracy: 0.5455
Epoch 76/400
164/164 [==============================] - 0s 73us/step - loss: 0.4818 - accuracy: 0.8110 - val_loss: 0.6969 - val_accuracy: 0.5455
Epoch 77/400
164/164 [==============================] - 0s 67us/step - loss: 0.4816 - accuracy: 0.8110 - val_loss: 0.6966 - val_accuracy: 0.5455
Epoch 78/400
164/164 [==============================] - 0s 79us/step - loss: 0.4814 - accuracy: 0.8110 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 79/400
164/164 [==============================] - 0s 79us/step - loss: 0.4812 - accuracy: 0.8110 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 80/400
164/164 [==============================] - 0s 73us/step - loss: 0.4811 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 81/400
164/164 [==============================] - 0s 73us/step - loss: 0.4810 - accuracy: 0.7988 - val_loss: 0.6963 - val_accuracy: 0.5455

Epoch 00081: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05.
Epoch 82/400
164/164 [==============================] - 0s 73us/step - loss: 0.4808 - accuracy: 0.7988 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 83/400
164/164 [==============================] - 0s 85us/step - loss: 0.4807 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 84/400
164/164 [==============================] - 0s 73us/step - loss: 0.4806 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 85/400
164/164 [==============================] - 0s 67us/step - loss: 0.4805 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 86/400
164/164 [==============================] - 0s 91us/step - loss: 0.4805 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 87/400
164/164 [==============================] - 0s 104us/step - loss: 0.4804 - accuracy: 0.8049 - val_loss: 0.6966 - val_accuracy: 0.5455
Epoch 88/400
164/164 [==============================] - 0s 91us/step - loss: 0.4804 - accuracy: 0.8049 - val_loss: 0.6967 - val_accuracy: 0.5455
Epoch 89/400
164/164 [==============================] - 0s 91us/step - loss: 0.4803 - accuracy: 0.8049 - val_loss: 0.6968 - val_accuracy: 0.5455
Epoch 90/400
164/164 [==============================] - 0s 73us/step - loss: 0.4802 - accuracy: 0.8049 - val_loss: 0.6969 - val_accuracy: 0.5455
Epoch 91/400
164/164 [==============================] - 0s 79us/step - loss: 0.4801 - accuracy: 0.8049 - val_loss: 0.6968 - val_accuracy: 0.5455

Epoch 00091: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05.
Epoch 92/400
164/164 [==============================] - 0s 67us/step - loss: 0.4800 - accuracy: 0.8049 - val_loss: 0.6967 - val_accuracy: 0.5455
Epoch 93/400
164/164 [==============================] - 0s 67us/step - loss: 0.4800 - accuracy: 0.8049 - val_loss: 0.6967 - val_accuracy: 0.5455
Epoch 94/400
164/164 [==============================] - 0s 73us/step - loss: 0.4800 - accuracy: 0.8049 - val_loss: 0.6966 - val_accuracy: 0.5455
Epoch 95/400
164/164 [==============================] - 0s 73us/step - loss: 0.4799 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 96/400
164/164 [==============================] - 0s 73us/step - loss: 0.4799 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 97/400
164/164 [==============================] - 0s 67us/step - loss: 0.4798 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 98/400
164/164 [==============================] - 0s 73us/step - loss: 0.4798 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 99/400
164/164 [==============================] - 0s 79us/step - loss: 0.4797 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 100/400
164/164 [==============================] - 0s 104us/step - loss: 0.4797 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 101/400
164/164 [==============================] - 0s 85us/step - loss: 0.4797 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455

Epoch 00101: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05.
Epoch 102/400
164/164 [==============================] - 0s 73us/step - loss: 0.4796 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 103/400
164/164 [==============================] - 0s 85us/step - loss: 0.4796 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 104/400
164/164 [==============================] - 0s 73us/step - loss: 0.4795 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 105/400
164/164 [==============================] - 0s 79us/step - loss: 0.4795 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 106/400
164/164 [==============================] - 0s 85us/step - loss: 0.4795 - accuracy: 0.8049 - val_loss: 0.6963 - val_accuracy: 0.5455
Epoch 107/400
164/164 [==============================] - 0s 79us/step - loss: 0.4795 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 108/400
164/164 [==============================] - 0s 73us/step - loss: 0.4794 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 109/400
164/164 [==============================] - 0s 73us/step - loss: 0.4794 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 110/400
164/164 [==============================] - 0s 73us/step - loss: 0.4794 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 111/400
164/164 [==============================] - 0s 73us/step - loss: 0.4794 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455

Epoch 00111: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06.
Epoch 112/400
164/164 [==============================] - 0s 73us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 113/400
164/164 [==============================] - 0s 67us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 114/400
164/164 [==============================] - 0s 73us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 115/400
164/164 [==============================] - 0s 73us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 116/400
164/164 [==============================] - 0s 79us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 117/400
164/164 [==============================] - 0s 73us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 118/400
164/164 [==============================] - 0s 73us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 119/400
164/164 [==============================] - 0s 73us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 120/400
164/164 [==============================] - 0s 79us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 121/400
164/164 [==============================] - 0s 73us/step - loss: 0.4793 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455

Epoch 00121: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06.
Epoch 122/400
164/164 [==============================] - 0s 67us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 123/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 124/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 125/400
164/164 [==============================] - 0s 85us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 126/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 127/400
164/164 [==============================] - 0s 91us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 128/400
164/164 [==============================] - 0s 79us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 129/400
164/164 [==============================] - 0s 79us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 130/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 131/400
164/164 [==============================] - 0s 85us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00131: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06.
Epoch 132/400
164/164 [==============================] - 0s 91us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 133/400
164/164 [==============================] - 0s 79us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 134/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 135/400
164/164 [==============================] - 0s 122us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 136/400
164/164 [==============================] - 0s 85us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 137/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6965 - val_accuracy: 0.5455
Epoch 138/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 139/400
164/164 [==============================] - 0s 122us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 140/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 141/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00141: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07.
Epoch 142/400
164/164 [==============================] - 0s 79us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 143/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 144/400
164/164 [==============================] - 0s 79us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 145/400
164/164 [==============================] - 0s 73us/step - loss: 0.4792 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 146/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 147/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 148/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 149/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 150/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 151/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00151: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07.
Epoch 152/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 153/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 154/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 155/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 156/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 157/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 158/400
164/164 [==============================] - 0s 122us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 159/400
164/164 [==============================] - 0s 110us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 160/400
164/164 [==============================] - 0s 104us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 161/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00161: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07.
Epoch 162/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 163/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 164/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 165/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 166/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 167/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 168/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 169/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 170/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 171/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00171: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08.
Epoch 172/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 173/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 174/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 175/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 176/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 177/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 178/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 179/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 180/400
164/164 [==============================] - 0s 104us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 181/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00181: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08.
Epoch 182/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 183/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 184/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 185/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 186/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 187/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 188/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 189/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 190/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 191/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00191: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08.
Epoch 192/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 193/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 194/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 195/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 196/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 197/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 198/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 199/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 200/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 201/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00201: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08.
Epoch 202/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 203/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 204/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 205/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 206/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 207/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 208/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 209/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 210/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 211/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00211: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09.
Epoch 212/400
164/164 [==============================] - 0s 91us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 213/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 214/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 215/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 216/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 217/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 218/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 219/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 220/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 221/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00221: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09.
Epoch 222/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 223/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 224/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 225/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 226/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 227/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 228/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 229/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 230/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 231/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00231: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09.
Epoch 232/400
164/164 [==============================] - 0s 91us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 233/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 234/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 235/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 236/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 237/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 238/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 239/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 240/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 241/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00241: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10.
Epoch 242/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 243/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 244/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 245/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 246/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 247/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 248/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 249/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 250/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 251/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00251: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10.
Epoch 252/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 253/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 254/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 255/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 256/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 257/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 258/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 259/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 260/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 261/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00261: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10.
Epoch 262/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 263/400
164/164 [==============================] - 0s 104us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 264/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 265/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 266/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 267/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 268/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 269/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 270/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 271/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00271: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11.
Epoch 272/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 273/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 274/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 275/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 276/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 277/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 278/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 279/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 280/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 281/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00281: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11.
Epoch 282/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 283/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 284/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 285/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 286/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 287/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 288/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 289/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 290/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 291/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00291: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11.
Epoch 292/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 293/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 294/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 295/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 296/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 297/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 298/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 299/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 300/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 301/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00301: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11.
Epoch 302/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 303/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 304/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 305/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 306/400
164/164 [==============================] - 0s 116us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 307/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 308/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 309/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 310/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 311/400
164/164 [==============================] - 0s 104us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00311: ReduceLROnPlateau reducing learning rate to 5.5879354962651284e-12.
Epoch 312/400
164/164 [==============================] - 0s 91us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 313/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 314/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 315/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 316/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 317/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 318/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 319/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 320/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 321/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00321: ReduceLROnPlateau reducing learning rate to 2.7939677481325642e-12.
Epoch 322/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 323/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 324/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 325/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 326/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 327/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 328/400
164/164 [==============================] - 0s 116us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 329/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 330/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 331/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00331: ReduceLROnPlateau reducing learning rate to 1.3969838740662821e-12.
Epoch 332/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 333/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 334/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 335/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 336/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 337/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 338/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 339/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 340/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 341/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00341: ReduceLROnPlateau reducing learning rate to 6.984919370331411e-13.
Epoch 342/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 343/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 344/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 345/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 346/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 347/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 348/400
164/164 [==============================] - 0s 128us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 349/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 350/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 351/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00351: ReduceLROnPlateau reducing learning rate to 3.4924596851657053e-13.
Epoch 352/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 353/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 354/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 355/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 356/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 357/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 358/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 359/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 360/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 361/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00361: ReduceLROnPlateau reducing learning rate to 1.7462298425828526e-13.
Epoch 362/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 363/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 364/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 365/400
164/164 [==============================] - 0s 104us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 366/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 367/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 368/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 369/400
164/164 [==============================] - 0s 122us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 370/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 371/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00371: ReduceLROnPlateau reducing learning rate to 8.731149212914263e-14.
Epoch 372/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 373/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 374/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 375/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 376/400
164/164 [==============================] - 0s 98us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 377/400
164/164 [==============================] - 0s 91us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 378/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 379/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 380/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 381/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00381: ReduceLROnPlateau reducing learning rate to 4.3655746064571316e-14.
Epoch 382/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 383/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 384/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 385/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 386/400
164/164 [==============================] - 0s 85us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 387/400
164/164 [==============================] - 0s 104us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 388/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 389/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 390/400
164/164 [==============================] - 0s 104us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 391/400
164/164 [==============================] - 0s 91us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455

Epoch 00391: ReduceLROnPlateau reducing learning rate to 2.1827873032285658e-14.
Epoch 392/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 393/400
164/164 [==============================] - 0s 73us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 394/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 395/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 396/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 397/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 398/400
164/164 [==============================] - 0s 91us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 399/400
164/164 [==============================] - 0s 79us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
Epoch 400/400
164/164 [==============================] - 0s 67us/step - loss: 0.4791 - accuracy: 0.8049 - val_loss: 0.6964 - val_accuracy: 0.5455
In [381]:
# Extract the learning curves recorded by Keras during model.fit.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x-axis point per completed epoch.
epochs = range(len(acc))

# Accuracy curves: dots = training, solid line = validation.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

# Loss curves on a separate figure.
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 400)
In [382]:
# Evaluate the trained Keras model on the held-out test set.
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
55/55 [==============================] - 0s 73us/step
test loss: 0.696373957937414, test accuracy: 0.5454545617103577
In [383]:
# Predicted probabilities for the positive class; AUC-ROC is threshold-free.
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
AUC ROC:  0.6058201058201058
In [384]:
# Threshold the predicted probabilities at 0.5 to obtain hard 0/1 labels,
# then score agreement with the ground truth corrected for chance
# (Cohen's kappa). Comprehension replaces the map+lambda for readability.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.09120951751487105

KMeans

In [329]:
# Inspect the standardized tonal-centroid feature matrix (219 rows x 6 columns).
X
Out[329]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6
0 -1.035481 1.779354 1.874576 0.924814 -0.129662 2.608421
1 0.965487 -0.399971 -1.606069 0.008311 0.834341 0.513694
2 -0.141249 -1.969933 -0.960470 1.005123 -1.117123 -2.399517
3 -1.590590 -0.729741 -0.575342 0.587988 -0.885561 -0.752828
4 -0.391524 -0.894181 -0.426309 1.017585 -0.391173 -0.920259
5 -1.256622 -0.886861 -0.850243 0.516749 -0.491454 -0.072867
6 -1.579202 0.121365 -0.522749 1.012025 -0.547676 -0.140430
7 -1.760350 -0.182429 -0.008789 1.576085 -0.878841 0.252104
8 1.115526 1.555384 0.609404 0.558809 -0.514428 -0.221726
9 1.467291 1.402697 0.806896 -0.279535 0.939735 0.758333
10 0.972379 1.550575 -0.223468 0.899199 1.412818 0.386724
11 0.294385 0.890870 0.493531 0.142145 0.212432 1.463886
12 0.795134 0.176458 1.588747 -0.412034 -0.982878 -0.299581
13 0.694481 0.577820 0.319393 0.451356 0.219257 0.563199
14 1.169114 0.075245 -0.980006 1.330732 2.094068 1.785970
15 0.962642 0.380225 -1.261850 1.044019 1.339949 1.776870
16 1.352245 0.463507 -0.679184 0.941519 2.196602 1.991369
17 1.784002 -1.453636 -1.128885 -0.626496 0.399672 -0.605861
18 0.929212 -0.538274 -1.016394 -0.167176 0.557933 1.511009
19 1.199761 -0.727252 0.322239 -1.105069 -0.125311 -0.979170
20 -0.485056 0.796900 0.581966 1.884586 -0.705890 -0.725300
21 -0.547233 0.692440 -0.162284 2.025268 -0.631876 -0.337240
22 1.446103 -0.074850 -0.132752 -0.064117 -0.209506 -0.465551
23 -0.312063 0.030270 -1.160963 0.726155 -1.511552 -0.509175
24 1.175126 -0.143713 -0.522479 0.641015 0.500311 0.617748
25 -1.044292 -0.058933 -1.340279 -1.302246 1.751828 -0.815403
26 -0.849044 0.079838 -0.400536 -1.312330 1.498217 -0.550869
27 -0.730672 -0.326196 -0.478608 -0.832610 -0.556236 -0.653280
28 -0.380922 -0.892886 -0.555313 -0.113628 1.211258 -0.901155
29 -0.368302 -1.168844 -0.094765 -0.158075 1.016584 -1.274561
... ... ... ... ... ... ...
189 -1.023243 0.827082 0.695531 0.482823 -0.093190 -0.130945
190 1.643548 -0.570770 0.545333 -0.137189 0.295910 -0.891672
191 1.543182 -0.533850 0.979103 0.227528 0.216491 -0.016099
192 1.416929 -1.770555 0.592692 -1.546796 -0.112419 -0.017441
193 -1.336444 0.162214 -1.528887 1.340066 0.343647 -0.060973
194 -0.331197 -0.545328 0.449891 -2.242097 0.210220 1.299600
195 -0.991382 -0.378373 -0.215170 -2.818431 1.156878 -0.599042
196 0.827092 0.502299 0.219306 1.474834 0.577530 0.832676
197 0.976291 0.325663 -0.091820 0.723604 0.494609 0.610596
198 0.903378 0.857383 0.090549 0.948012 1.127442 0.927032
199 -1.135922 -0.217483 -0.201444 0.204262 -0.033230 -0.725561
200 -1.143077 -0.289624 -0.109440 0.093244 0.007101 -0.571608
201 -1.325584 -0.109383 -0.850284 -0.442939 0.518129 -0.996845
202 0.270878 1.568003 -0.899682 0.187348 -0.995623 0.436835
203 -0.010376 1.403657 -0.298654 0.126520 -0.803249 -0.284875
204 -0.149606 0.679408 -0.527828 0.145473 0.226461 0.232361
205 -1.281900 0.472582 2.041397 -0.186464 1.140780 -0.694445
206 -1.561361 0.699591 0.373931 0.512801 0.245563 -1.259098
207 -0.548022 0.646014 -0.015758 -0.364427 1.106060 -0.395692
208 -0.689835 0.729721 0.242422 0.167324 -0.269920 0.625568
209 -1.182263 0.898528 0.655331 1.146978 -0.973699 0.509883
210 -0.465862 0.576977 -0.088421 1.290934 0.648005 0.669298
211 -0.265321 1.252143 0.230904 0.383047 -0.920749 0.237760
212 0.205358 1.300786 0.929349 -0.432002 -0.464366 -0.242135
213 -0.025600 0.467818 0.261063 -1.437444 -0.391460 -0.995280
214 -1.082557 1.025513 2.276661 1.056731 0.361540 1.291351
215 -1.297371 1.948703 2.264684 1.377703 1.194669 1.983124
216 -0.926424 0.162164 1.016687 1.945841 -1.341651 0.150826
217 -1.375041 -0.362757 -0.599873 1.478900 -0.021584 -0.846072
218 -0.974264 0.740461 0.889462 0.014997 1.024334 -0.992000

219 rows × 6 columns

In [330]:
# Elbow method: collect the within-cluster sum of squares (inertia) for
# k = 1..14 clusters, each fit with a fixed random_state for reproducibility.
WSSs = []
for n_clusters in range(1, 15):
    km = KMeans(n_clusters=n_clusters, random_state=0).fit(X)
    WSSs.append(km.inertia_)
WSSs
Out[330]:
[1314.0,
 1103.6617898421102,
 933.9046374976435,
 830.0952355796812,
 752.8157274494505,
 696.8283563577859,
 641.355058887789,
 599.6834692450786,
 558.0899857646746,
 538.6016435622136,
 502.540180641064,
 477.03865333096127,
 457.1745404655215,
 443.08717934712786]
In [331]:
# Elbow plot: look for the "knee" where adding more clusters stops paying off.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow method for KMeans')
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares')
plt.show()
Out[331]:
[<matplotlib.lines.Line2D at 0x1e831ea44e0>]

K = 3 (chosen from the elbow plot above)

In [332]:
# Fit KMeans with k = 3 (chosen from the elbow plot above) on the
# standardized tonal-centroid features.
kmeans_tc = KMeans(n_clusters=3, random_state=0, n_init=10)
kmeans_tc.fit(X)
Out[332]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [333]:
# Cluster assignment (0/1/2) for each row of X, in row order.
kmeans_tc.labels_
Out[333]:
array([2, 2, 0, 1, 0, 1, 1, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 0, 2, 0, 2, 2,
       0, 2, 2, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
       2, 2, 1, 0, 0, 1, 1, 1, 2, 2, 2, 0, 0, 0, 2, 0, 2, 1, 1, 2, 2, 2,
       0, 0, 0, 0, 1, 0, 2, 2, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 2, 0, 2, 0,
       2, 1, 1, 2, 2, 1, 2, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 1, 1, 0,
       2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2,
       2, 0, 0, 0, 2, 0, 0, 2, 1, 0, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2,
       0, 0, 2, 0, 0, 0, 0, 0, 2, 2, 0, 2, 0, 0, 1, 1, 1, 1, 1, 1, 2, 1,
       1, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 1, 0, 1, 2, 2,
       2, 1, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1])
In [334]:
# For the data the model was fitted on, predict(X) returns the same
# assignments already stored at fit time (the Out blocks above are
# identical), so reuse labels_ instead of recomputing nearest centroids.
clusters_tc = kmeans_tc.labels_
clusters_tc
Out[334]:
array([2, 2, 0, 1, 0, 1, 1, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 0, 2, 0, 2, 2,
       0, 2, 2, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
       2, 2, 1, 0, 0, 1, 1, 1, 2, 2, 2, 0, 0, 0, 2, 0, 2, 1, 1, 2, 2, 2,
       0, 0, 0, 0, 1, 0, 2, 2, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 2, 0, 2, 0,
       2, 1, 1, 2, 2, 1, 2, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 1, 1, 0,
       2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2,
       2, 0, 0, 0, 2, 0, 0, 2, 1, 0, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2,
       0, 0, 2, 0, 0, 0, 0, 0, 2, 2, 0, 2, 0, 0, 1, 1, 1, 1, 1, 1, 2, 1,
       1, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 1, 0, 1, 2, 2,
       2, 1, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1])
In [335]:
# Append the cluster assignment and the target label as columns.
# NOTE(review): this mutates X in place — after this cell X is no longer a
# pure feature matrix, so re-running earlier cells against X would include
# these extra columns.
X.loc[:,'Cluster'] = clusters_tc
X.loc[:,'chosen'] = list(y)
In [336]:
# Inspect X with the new 'Cluster' and 'chosen' columns appended.
X
Out[336]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6 Cluster chosen
0 -1.035481 1.779354 1.874576 0.924814 -0.129662 2.608421 2 0
1 0.965487 -0.399971 -1.606069 0.008311 0.834341 0.513694 2 0
2 -0.141249 -1.969933 -0.960470 1.005123 -1.117123 -2.399517 0 0
3 -1.590590 -0.729741 -0.575342 0.587988 -0.885561 -0.752828 1 0
4 -0.391524 -0.894181 -0.426309 1.017585 -0.391173 -0.920259 0 0
5 -1.256622 -0.886861 -0.850243 0.516749 -0.491454 -0.072867 1 0
6 -1.579202 0.121365 -0.522749 1.012025 -0.547676 -0.140430 1 0
7 -1.760350 -0.182429 -0.008789 1.576085 -0.878841 0.252104 2 0
8 1.115526 1.555384 0.609404 0.558809 -0.514428 -0.221726 2 0
9 1.467291 1.402697 0.806896 -0.279535 0.939735 0.758333 2 0
10 0.972379 1.550575 -0.223468 0.899199 1.412818 0.386724 2 0
11 0.294385 0.890870 0.493531 0.142145 0.212432 1.463886 2 0
12 0.795134 0.176458 1.588747 -0.412034 -0.982878 -0.299581 0 0
13 0.694481 0.577820 0.319393 0.451356 0.219257 0.563199 2 0
14 1.169114 0.075245 -0.980006 1.330732 2.094068 1.785970 2 0
15 0.962642 0.380225 -1.261850 1.044019 1.339949 1.776870 2 0
16 1.352245 0.463507 -0.679184 0.941519 2.196602 1.991369 2 0
17 1.784002 -1.453636 -1.128885 -0.626496 0.399672 -0.605861 0 0
18 0.929212 -0.538274 -1.016394 -0.167176 0.557933 1.511009 2 0
19 1.199761 -0.727252 0.322239 -1.105069 -0.125311 -0.979170 0 0
20 -0.485056 0.796900 0.581966 1.884586 -0.705890 -0.725300 2 0
21 -0.547233 0.692440 -0.162284 2.025268 -0.631876 -0.337240 2 0
22 1.446103 -0.074850 -0.132752 -0.064117 -0.209506 -0.465551 0 0
23 -0.312063 0.030270 -1.160963 0.726155 -1.511552 -0.509175 2 0
24 1.175126 -0.143713 -0.522479 0.641015 0.500311 0.617748 2 0
25 -1.044292 -0.058933 -1.340279 -1.302246 1.751828 -0.815403 1 0
26 -0.849044 0.079838 -0.400536 -1.312330 1.498217 -0.550869 1 0
27 -0.730672 -0.326196 -0.478608 -0.832610 -0.556236 -0.653280 1 0
28 -0.380922 -0.892886 -0.555313 -0.113628 1.211258 -0.901155 1 0
29 -0.368302 -1.168844 -0.094765 -0.158075 1.016584 -1.274561 1 0
... ... ... ... ... ... ... ... ...
189 -1.023243 0.827082 0.695531 0.482823 -0.093190 -0.130945 2 1
190 1.643548 -0.570770 0.545333 -0.137189 0.295910 -0.891672 0 1
191 1.543182 -0.533850 0.979103 0.227528 0.216491 -0.016099 0 1
192 1.416929 -1.770555 0.592692 -1.546796 -0.112419 -0.017441 0 1
193 -1.336444 0.162214 -1.528887 1.340066 0.343647 -0.060973 1 1
194 -0.331197 -0.545328 0.449891 -2.242097 0.210220 1.299600 0 1
195 -0.991382 -0.378373 -0.215170 -2.818431 1.156878 -0.599042 1 1
196 0.827092 0.502299 0.219306 1.474834 0.577530 0.832676 2 1
197 0.976291 0.325663 -0.091820 0.723604 0.494609 0.610596 2 1
198 0.903378 0.857383 0.090549 0.948012 1.127442 0.927032 2 1
199 -1.135922 -0.217483 -0.201444 0.204262 -0.033230 -0.725561 1 1
200 -1.143077 -0.289624 -0.109440 0.093244 0.007101 -0.571608 1 1
201 -1.325584 -0.109383 -0.850284 -0.442939 0.518129 -0.996845 1 1
202 0.270878 1.568003 -0.899682 0.187348 -0.995623 0.436835 2 1
203 -0.010376 1.403657 -0.298654 0.126520 -0.803249 -0.284875 2 1
204 -0.149606 0.679408 -0.527828 0.145473 0.226461 0.232361 2 1
205 -1.281900 0.472582 2.041397 -0.186464 1.140780 -0.694445 1 1
206 -1.561361 0.699591 0.373931 0.512801 0.245563 -1.259098 1 1
207 -0.548022 0.646014 -0.015758 -0.364427 1.106060 -0.395692 1 1
208 -0.689835 0.729721 0.242422 0.167324 -0.269920 0.625568 2 1
209 -1.182263 0.898528 0.655331 1.146978 -0.973699 0.509883 2 1
210 -0.465862 0.576977 -0.088421 1.290934 0.648005 0.669298 2 1
211 -0.265321 1.252143 0.230904 0.383047 -0.920749 0.237760 2 1
212 0.205358 1.300786 0.929349 -0.432002 -0.464366 -0.242135 2 1
213 -0.025600 0.467818 0.261063 -1.437444 -0.391460 -0.995280 1 1
214 -1.082557 1.025513 2.276661 1.056731 0.361540 1.291351 2 1
215 -1.297371 1.948703 2.264684 1.377703 1.194669 1.983124 2 1
216 -0.926424 0.162164 1.016687 1.945841 -1.341651 0.150826 2 1
217 -1.375041 -0.362757 -0.599873 1.478900 -0.021584 -0.846072 1 1
218 -0.974264 0.740461 0.889462 0.014997 1.024334 -0.992000 1 1

219 rows × 8 columns

In [337]:
# Cross-tabulate cluster membership against the 'chosen' label and draw a
# stacked bar chart: one bar per cluster, split by chosen = 0 / 1.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[337]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e832243f98>
In [385]:
# Render the current company's name as a markdown section header.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[5]))

Urban Place

ANN

In [386]:
# Standardized tonal-centroid features for company index 5 ("Urban Place").
X = df_n_ps_std_tc[5]
In [387]:
# Binary target: whether the track was chosen.
y = df_n_ps[5]['chosen']
In [388]:
# Fix the split seed so the train/test partition is reproducible across
# kernel restarts (previously the split changed on every run).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
In [389]:
# Sanity check: training-set size and feature count.
X_train.shape
Out[389]:
(168, 6)
In [343]:
# Base MLP estimator; its hyperparameters are tuned by the grid search below.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [344]:
# Hyperparameter candidate grids for the MLP search: activation function,
# training iterations, network shape, initial learning rate, batch size.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [345]:
# Exhaustive grid search over MLP hyperparameters with 5-fold CV, scored by
# both accuracy (used for refit) and Cohen's kappa.
import time
start = time.time() # Current time in seconds since January 1, 1970 (reference point)

np.random.seed(1234)
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [346]:
# Run the search and report the winning configuration with its CV scores
# and the total wall-clock time (this took ~29 minutes on the original run).
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time after the model search finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (20, 10), 'learning_rate_init': 0.005, 'max_iter': 200}, que permiten obtener un Accuracy de 70.83% y un Kappa del 35.52
Tiempo total: 28.81 minutos
In [390]:
n0 = X_train.shape[1]  # input dimension (number of features)

# Manually pin the architecture found by the grid search so this cell does
# not depend on re-running the (slow) search cell above.
grid.best_params_['hidden_layer_sizes'] = [20, 10]

# Layer sizes: the hidden layers from the grid search, plus one output unit.
# (list() replaces the original element-by-element copy loop.)
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)

lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']
In [391]:
# Keras functional API: input layer sized to the number of features.
input_tensor = Input(shape = (n0,))
In [392]:
# Chain the hidden Dense layers (tanh, sizes ns[:-1]); the final sigmoid
# unit (ns[-1] == 1) produces the binary-classification probability.
hidden_outputs = [input_tensor]
for i in range (len(ns)-1):
    hidden_outputs.append(Dense(ns[i], activation = 'tanh')(hidden_outputs[i]))
    
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [393]:
# Build the model and snapshot its freshly initialized weights; they are
# restored via model.set_weights(weights) before training below.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [394]:
# Architecture summary: 6 -> 20 -> 10 -> 1, 361 trainable parameters.
model.summary()
Model: "model_20"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_20 (InputLayer)        (None, 6)                 0         
_________________________________________________________________
dense_63 (Dense)             (None, 20)                140       
_________________________________________________________________
dense_64 (Dense)             (None, 10)                210       
_________________________________________________________________
dense_65 (Dense)             (None, 1)                 11        
=================================================================
Total params: 361
Trainable params: 361
Non-trainable params: 0
_________________________________________________________________
In [395]:
# Restore the saved initial weights, then train with Adam at the
# grid-search learning rate; ReduceLROnPlateau halves the LR whenever
# validation accuracy fails to improve by 0.01 for 10 epochs.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 168 samples, validate on 57 samples
Epoch 1/100
168/168 [==============================] - 0s 1ms/step - loss: 0.7516 - accuracy: 0.4524 - val_loss: 0.6534 - val_accuracy: 0.6316
Epoch 2/100
168/168 [==============================] - 0s 95us/step - loss: 0.6794 - accuracy: 0.5536 - val_loss: 0.6620 - val_accuracy: 0.5789
Epoch 3/100
168/168 [==============================] - 0s 89us/step - loss: 0.6278 - accuracy: 0.6429 - val_loss: 0.6804 - val_accuracy: 0.5789
Epoch 4/100
168/168 [==============================] - 0s 77us/step - loss: 0.5998 - accuracy: 0.7143 - val_loss: 0.7046 - val_accuracy: 0.6316
Epoch 5/100
168/168 [==============================] - 0s 65us/step - loss: 0.5769 - accuracy: 0.7321 - val_loss: 0.7229 - val_accuracy: 0.6316
Epoch 6/100
168/168 [==============================] - 0s 77us/step - loss: 0.5672 - accuracy: 0.7381 - val_loss: 0.7281 - val_accuracy: 0.6491
Epoch 7/100
168/168 [==============================] - 0s 77us/step - loss: 0.5561 - accuracy: 0.7679 - val_loss: 0.7289 - val_accuracy: 0.6667
Epoch 8/100
168/168 [==============================] - 0s 65us/step - loss: 0.5490 - accuracy: 0.7679 - val_loss: 0.7377 - val_accuracy: 0.6667
Epoch 9/100
168/168 [==============================] - 0s 65us/step - loss: 0.5437 - accuracy: 0.7738 - val_loss: 0.7515 - val_accuracy: 0.6491
Epoch 10/100
168/168 [==============================] - 0s 59us/step - loss: 0.5371 - accuracy: 0.7738 - val_loss: 0.7648 - val_accuracy: 0.6140
Epoch 11/100
168/168 [==============================] - 0s 59us/step - loss: 0.5354 - accuracy: 0.7917 - val_loss: 0.7719 - val_accuracy: 0.6140
Epoch 12/100
168/168 [==============================] - 0s 59us/step - loss: 0.5319 - accuracy: 0.8036 - val_loss: 0.7592 - val_accuracy: 0.6316
Epoch 13/100
168/168 [==============================] - 0s 65us/step - loss: 0.5290 - accuracy: 0.7798 - val_loss: 0.7525 - val_accuracy: 0.6491
Epoch 14/100
168/168 [==============================] - 0s 65us/step - loss: 0.5266 - accuracy: 0.7917 - val_loss: 0.7563 - val_accuracy: 0.6491
Epoch 15/100
168/168 [==============================] - 0s 65us/step - loss: 0.5245 - accuracy: 0.8036 - val_loss: 0.7535 - val_accuracy: 0.6842
Epoch 16/100
168/168 [==============================] - 0s 65us/step - loss: 0.5211 - accuracy: 0.7798 - val_loss: 0.7572 - val_accuracy: 0.6842
Epoch 17/100
168/168 [==============================] - 0s 71us/step - loss: 0.5179 - accuracy: 0.7917 - val_loss: 0.7595 - val_accuracy: 0.6667
Epoch 18/100
168/168 [==============================] - 0s 59us/step - loss: 0.5160 - accuracy: 0.7976 - val_loss: 0.7636 - val_accuracy: 0.6667
Epoch 19/100
168/168 [==============================] - 0s 65us/step - loss: 0.5133 - accuracy: 0.7798 - val_loss: 0.7590 - val_accuracy: 0.6667
Epoch 20/100
168/168 [==============================] - 0s 65us/step - loss: 0.5100 - accuracy: 0.7917 - val_loss: 0.7540 - val_accuracy: 0.6667
Epoch 21/100
168/168 [==============================] - 0s 59us/step - loss: 0.5073 - accuracy: 0.7917 - val_loss: 0.7603 - val_accuracy: 0.6667
Epoch 22/100
168/168 [==============================] - 0s 65us/step - loss: 0.5060 - accuracy: 0.7857 - val_loss: 0.7696 - val_accuracy: 0.6491
Epoch 23/100
168/168 [==============================] - 0s 65us/step - loss: 0.5016 - accuracy: 0.8095 - val_loss: 0.7726 - val_accuracy: 0.6316
Epoch 24/100
168/168 [==============================] - 0s 65us/step - loss: 0.5002 - accuracy: 0.7976 - val_loss: 0.7699 - val_accuracy: 0.6667
Epoch 25/100
168/168 [==============================] - 0s 65us/step - loss: 0.4978 - accuracy: 0.7976 - val_loss: 0.7785 - val_accuracy: 0.6667

Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.0020000000949949026.
Epoch 26/100
168/168 [==============================] - 0s 59us/step - loss: 0.4944 - accuracy: 0.8036 - val_loss: 0.7787 - val_accuracy: 0.6842
Epoch 27/100
168/168 [==============================] - 0s 71us/step - loss: 0.4930 - accuracy: 0.7917 - val_loss: 0.7802 - val_accuracy: 0.6667
Epoch 28/100
168/168 [==============================] - 0s 101us/step - loss: 0.4907 - accuracy: 0.8036 - val_loss: 0.7801 - val_accuracy: 0.6667
Epoch 29/100
168/168 [==============================] - 0s 71us/step - loss: 0.4896 - accuracy: 0.7976 - val_loss: 0.7687 - val_accuracy: 0.6667
Epoch 30/100
168/168 [==============================] - 0s 71us/step - loss: 0.4882 - accuracy: 0.7917 - val_loss: 0.7618 - val_accuracy: 0.6667
Epoch 31/100
168/168 [==============================] - 0s 71us/step - loss: 0.4868 - accuracy: 0.7976 - val_loss: 0.7595 - val_accuracy: 0.6667
Epoch 32/100
168/168 [==============================] - 0s 71us/step - loss: 0.4847 - accuracy: 0.7917 - val_loss: 0.7590 - val_accuracy: 0.6667
Epoch 33/100
168/168 [==============================] - 0s 59us/step - loss: 0.4828 - accuracy: 0.7976 - val_loss: 0.7548 - val_accuracy: 0.6667
Epoch 34/100
168/168 [==============================] - 0s 77us/step - loss: 0.4806 - accuracy: 0.7976 - val_loss: 0.7538 - val_accuracy: 0.6667
Epoch 35/100
168/168 [==============================] - 0s 77us/step - loss: 0.4791 - accuracy: 0.8155 - val_loss: 0.7565 - val_accuracy: 0.6667

Epoch 00035: ReduceLROnPlateau reducing learning rate to 0.0010000000474974513.
Epoch 36/100
168/168 [==============================] - 0s 119us/step - loss: 0.4771 - accuracy: 0.8155 - val_loss: 0.7576 - val_accuracy: 0.6667
Epoch 37/100
168/168 [==============================] - 0s 71us/step - loss: 0.4765 - accuracy: 0.8155 - val_loss: 0.7602 - val_accuracy: 0.6667
Epoch 38/100
168/168 [==============================] - 0s 77us/step - loss: 0.4759 - accuracy: 0.8155 - val_loss: 0.7621 - val_accuracy: 0.6667
Epoch 39/100
168/168 [==============================] - 0s 71us/step - loss: 0.4749 - accuracy: 0.8155 - val_loss: 0.7638 - val_accuracy: 0.6667
Epoch 40/100
168/168 [==============================] - 0s 65us/step - loss: 0.4738 - accuracy: 0.8155 - val_loss: 0.7652 - val_accuracy: 0.6667
Epoch 41/100
168/168 [==============================] - 0s 71us/step - loss: 0.4728 - accuracy: 0.8155 - val_loss: 0.7650 - val_accuracy: 0.6667
Epoch 42/100
168/168 [==============================] - 0s 65us/step - loss: 0.4723 - accuracy: 0.8155 - val_loss: 0.7694 - val_accuracy: 0.6667
Epoch 43/100
168/168 [==============================] - 0s 65us/step - loss: 0.4712 - accuracy: 0.8214 - val_loss: 0.7701 - val_accuracy: 0.6667
Epoch 44/100
168/168 [==============================] - 0s 65us/step - loss: 0.4705 - accuracy: 0.8214 - val_loss: 0.7686 - val_accuracy: 0.6667
Epoch 45/100
168/168 [==============================] - 0s 65us/step - loss: 0.4691 - accuracy: 0.8214 - val_loss: 0.7664 - val_accuracy: 0.6667

Epoch 00045: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 46/100
168/168 [==============================] - 0s 65us/step - loss: 0.4682 - accuracy: 0.8214 - val_loss: 0.7636 - val_accuracy: 0.6667
Epoch 47/100
168/168 [==============================] - 0s 71us/step - loss: 0.4678 - accuracy: 0.8214 - val_loss: 0.7618 - val_accuracy: 0.6667
Epoch 48/100
168/168 [==============================] - 0s 71us/step - loss: 0.4672 - accuracy: 0.8214 - val_loss: 0.7608 - val_accuracy: 0.6667
Epoch 49/100
168/168 [==============================] - 0s 89us/step - loss: 0.4667 - accuracy: 0.8214 - val_loss: 0.7609 - val_accuracy: 0.6667
Epoch 50/100
168/168 [==============================] - 0s 89us/step - loss: 0.4662 - accuracy: 0.8214 - val_loss: 0.7620 - val_accuracy: 0.6667
Epoch 51/100
168/168 [==============================] - 0s 65us/step - loss: 0.4658 - accuracy: 0.8214 - val_loss: 0.7614 - val_accuracy: 0.6667
Epoch 52/100
168/168 [==============================] - 0s 71us/step - loss: 0.4652 - accuracy: 0.8214 - val_loss: 0.7618 - val_accuracy: 0.6667
Epoch 53/100
168/168 [==============================] - 0s 65us/step - loss: 0.4647 - accuracy: 0.8214 - val_loss: 0.7616 - val_accuracy: 0.6667
Epoch 54/100
168/168 [==============================] - 0s 65us/step - loss: 0.4643 - accuracy: 0.8214 - val_loss: 0.7591 - val_accuracy: 0.6667
Epoch 55/100
168/168 [==============================] - 0s 65us/step - loss: 0.4638 - accuracy: 0.8214 - val_loss: 0.7577 - val_accuracy: 0.6667

Epoch 00055: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 56/100
168/168 [==============================] - 0s 71us/step - loss: 0.4632 - accuracy: 0.8214 - val_loss: 0.7573 - val_accuracy: 0.6667
Epoch 57/100
168/168 [==============================] - 0s 65us/step - loss: 0.4630 - accuracy: 0.8214 - val_loss: 0.7570 - val_accuracy: 0.6667
Epoch 58/100
168/168 [==============================] - 0s 59us/step - loss: 0.4628 - accuracy: 0.8274 - val_loss: 0.7557 - val_accuracy: 0.6667
Epoch 59/100
168/168 [==============================] - 0s 71us/step - loss: 0.4625 - accuracy: 0.8274 - val_loss: 0.7553 - val_accuracy: 0.6667
Epoch 60/100
168/168 [==============================] - 0s 71us/step - loss: 0.4624 - accuracy: 0.8274 - val_loss: 0.7548 - val_accuracy: 0.6667
Epoch 61/100
168/168 [==============================] - 0s 125us/step - loss: 0.4621 - accuracy: 0.8274 - val_loss: 0.7555 - val_accuracy: 0.6667
Epoch 62/100
168/168 [==============================] - 0s 83us/step - loss: 0.4618 - accuracy: 0.8274 - val_loss: 0.7562 - val_accuracy: 0.6667
Epoch 63/100
168/168 [==============================] - 0s 83us/step - loss: 0.4616 - accuracy: 0.8274 - val_loss: 0.7562 - val_accuracy: 0.6667
Epoch 64/100
168/168 [==============================] - 0s 71us/step - loss: 0.4613 - accuracy: 0.8274 - val_loss: 0.7565 - val_accuracy: 0.6667
Epoch 65/100
168/168 [==============================] - 0s 65us/step - loss: 0.4611 - accuracy: 0.8274 - val_loss: 0.7565 - val_accuracy: 0.6667

Epoch 00065: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 66/100
168/168 [==============================] - 0s 65us/step - loss: 0.4609 - accuracy: 0.8274 - val_loss: 0.7561 - val_accuracy: 0.6667
Epoch 67/100
168/168 [==============================] - 0s 65us/step - loss: 0.4608 - accuracy: 0.8274 - val_loss: 0.7560 - val_accuracy: 0.6667
Epoch 68/100
168/168 [==============================] - 0s 65us/step - loss: 0.4606 - accuracy: 0.8274 - val_loss: 0.7560 - val_accuracy: 0.6667
Epoch 69/100
168/168 [==============================] - 0s 65us/step - loss: 0.4605 - accuracy: 0.8274 - val_loss: 0.7559 - val_accuracy: 0.6667
Epoch 70/100
168/168 [==============================] - 0s 65us/step - loss: 0.4604 - accuracy: 0.8274 - val_loss: 0.7558 - val_accuracy: 0.6667
Epoch 71/100
168/168 [==============================] - 0s 77us/step - loss: 0.4602 - accuracy: 0.8274 - val_loss: 0.7558 - val_accuracy: 0.6667
Epoch 72/100
168/168 [==============================] - 0s 65us/step - loss: 0.4601 - accuracy: 0.8274 - val_loss: 0.7561 - val_accuracy: 0.6667
Epoch 73/100
168/168 [==============================] - 0s 71us/step - loss: 0.4600 - accuracy: 0.8274 - val_loss: 0.7562 - val_accuracy: 0.6667
Epoch 74/100
168/168 [==============================] - 0s 77us/step - loss: 0.4599 - accuracy: 0.8274 - val_loss: 0.7562 - val_accuracy: 0.6667
Epoch 75/100
168/168 [==============================] - 0s 65us/step - loss: 0.4598 - accuracy: 0.8274 - val_loss: 0.7560 - val_accuracy: 0.6667

Epoch 00075: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 76/100
168/168 [==============================] - 0s 65us/step - loss: 0.4596 - accuracy: 0.8274 - val_loss: 0.7561 - val_accuracy: 0.6667
Epoch 77/100
168/168 [==============================] - 0s 77us/step - loss: 0.4596 - accuracy: 0.8274 - val_loss: 0.7561 - val_accuracy: 0.6667
Epoch 78/100
168/168 [==============================] - 0s 113us/step - loss: 0.4595 - accuracy: 0.8274 - val_loss: 0.7561 - val_accuracy: 0.6667
Epoch 79/100
168/168 [==============================] - 0s 89us/step - loss: 0.4595 - accuracy: 0.8274 - val_loss: 0.7562 - val_accuracy: 0.6667
Epoch 80/100
168/168 [==============================] - 0s 83us/step - loss: 0.4594 - accuracy: 0.8274 - val_loss: 0.7564 - val_accuracy: 0.6667
Epoch 81/100
168/168 [==============================] - 0s 59us/step - loss: 0.4593 - accuracy: 0.8274 - val_loss: 0.7566 - val_accuracy: 0.6667
Epoch 82/100
168/168 [==============================] - 0s 65us/step - loss: 0.4593 - accuracy: 0.8274 - val_loss: 0.7565 - val_accuracy: 0.6667
Epoch 83/100
168/168 [==============================] - 0s 65us/step - loss: 0.4592 - accuracy: 0.8274 - val_loss: 0.7564 - val_accuracy: 0.6667
Epoch 84/100
168/168 [==============================] - 0s 59us/step - loss: 0.4591 - accuracy: 0.8274 - val_loss: 0.7564 - val_accuracy: 0.6667
Epoch 85/100
168/168 [==============================] - 0s 59us/step - loss: 0.4591 - accuracy: 0.8274 - val_loss: 0.7566 - val_accuracy: 0.6667

Epoch 00085: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 86/100
168/168 [==============================] - 0s 59us/step - loss: 0.4590 - accuracy: 0.8214 - val_loss: 0.7566 - val_accuracy: 0.6667
Epoch 87/100
168/168 [==============================] - 0s 71us/step - loss: 0.4590 - accuracy: 0.8214 - val_loss: 0.7566 - val_accuracy: 0.6667
Epoch 88/100
168/168 [==============================] - 0s 65us/step - loss: 0.4590 - accuracy: 0.8214 - val_loss: 0.7566 - val_accuracy: 0.6667
Epoch 89/100
168/168 [==============================] - 0s 59us/step - loss: 0.4589 - accuracy: 0.8214 - val_loss: 0.7566 - val_accuracy: 0.6667
Epoch 90/100
168/168 [==============================] - 0s 83us/step - loss: 0.4589 - accuracy: 0.8214 - val_loss: 0.7567 - val_accuracy: 0.6667
Epoch 91/100
168/168 [==============================] - 0s 83us/step - loss: 0.4589 - accuracy: 0.8214 - val_loss: 0.7566 - val_accuracy: 0.6667
Epoch 92/100
168/168 [==============================] - 0s 77us/step - loss: 0.4588 - accuracy: 0.8214 - val_loss: 0.7567 - val_accuracy: 0.6667
Epoch 93/100
168/168 [==============================] - 0s 65us/step - loss: 0.4588 - accuracy: 0.8214 - val_loss: 0.7567 - val_accuracy: 0.6667
Epoch 94/100
168/168 [==============================] - 0s 65us/step - loss: 0.4588 - accuracy: 0.8214 - val_loss: 0.7567 - val_accuracy: 0.6667
Epoch 95/100
168/168 [==============================] - 0s 65us/step - loss: 0.4587 - accuracy: 0.8214 - val_loss: 0.7565 - val_accuracy: 0.6667

Epoch 00095: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05.
Epoch 96/100
168/168 [==============================] - 0s 71us/step - loss: 0.4587 - accuracy: 0.8214 - val_loss: 0.7564 - val_accuracy: 0.6667
Epoch 97/100
168/168 [==============================] - 0s 59us/step - loss: 0.4587 - accuracy: 0.8214 - val_loss: 0.7564 - val_accuracy: 0.6667
Epoch 98/100
168/168 [==============================] - 0s 65us/step - loss: 0.4587 - accuracy: 0.8214 - val_loss: 0.7563 - val_accuracy: 0.6667
Epoch 99/100
168/168 [==============================] - 0s 65us/step - loss: 0.4587 - accuracy: 0.8214 - val_loss: 0.7562 - val_accuracy: 0.6667
Epoch 100/100
168/168 [==============================] - 0s 71us/step - loss: 0.4586 - accuracy: 0.8214 - val_loss: 0.7561 - val_accuracy: 0.6667
In [396]:
# Learning curves for the trained network: accuracy and loss per epoch,
# on both the training data and the validation (test) split.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))

# (Removed leftover debug `print(epochs)` — it only echoed `range(0, 100)`.)
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 100)
In [397]:
# Evaluate the trained network on the held-out test split.
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
57/57 [==============================] - 0s 53us/step
test loss: 0.756120643071961, test accuracy: 0.6666666865348816
In [398]:
# Probability predictions on the test set; AUC-ROC is threshold-independent,
# so it is computed on the raw sigmoid outputs (before binarizing below).
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
AUC ROC:  0.5753246753246753
In [399]:
# Binarize the sigmoid probabilities at the conventional 0.5 threshold.
# np.ravel flattens model.predict()'s (n_samples, 1) output so each element is
# a scalar — the original `int()` on a size-1 array relies on behavior NumPy
# has deprecated (and later removed).
y_pred = [int(p >= 0.5) for p in np.ravel(y_pred)]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.19479553903345737

KMeans

In [357]:
X
Out[357]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6
0 1.221298 -0.131733 -0.299437 -1.504299 -0.981450 0.118501
1 -0.401407 -1.423353 0.879221 0.761177 -0.141123 -1.312558
2 -0.336789 -0.749510 0.197427 0.350093 -0.148137 0.191187
3 -0.785803 0.323072 0.152856 -0.630916 1.431384 -1.260729
4 -0.879329 0.391910 0.348949 -0.626137 1.401315 -0.417223
5 -1.122300 0.270282 0.525297 1.043848 -0.500903 0.121388
6 1.854801 -0.110375 0.714529 -0.774181 0.550772 -0.030796
7 1.233933 0.235407 0.220324 -0.416248 0.185500 0.429491
8 0.870319 0.678474 0.499988 1.108991 1.281285 0.734961
9 0.390986 0.860360 -1.381945 0.173292 -0.891547 -0.692921
10 0.746873 1.087314 0.138873 0.673665 0.999100 1.072527
11 0.794479 0.967314 -0.363499 1.122351 0.135802 -0.719195
12 -1.413209 -0.136961 -0.371584 0.036112 -0.471990 -1.250545
13 -0.072759 -1.149988 1.952271 0.500676 0.052506 -1.006895
14 -1.273932 -0.022476 -0.393398 0.422307 -1.130763 -0.797664
15 -1.343760 -0.913734 0.077520 -0.431201 -1.914695 0.581853
16 -0.984733 -0.428093 1.220123 -0.685684 1.139966 0.712742
17 0.280303 1.062737 -0.783356 -0.172690 -0.675275 0.261145
18 0.000660 1.240654 -0.919850 -0.619755 -0.483109 -0.722220
19 0.174949 1.113514 -2.625056 -0.681779 -0.763682 0.031420
20 -0.183363 -1.557702 -0.979966 0.553109 1.161043 -0.644240
21 -0.171420 -1.728398 -0.653886 -0.361851 0.584439 -0.772678
22 -0.120605 -1.190295 1.627818 0.127251 -0.575228 0.475040
23 -1.074141 1.086592 0.969446 0.102117 0.044168 1.096704
24 -1.016635 1.061833 0.981137 -0.047605 -0.063650 0.778928
25 -1.093370 1.015536 -0.193033 -0.486717 -0.763605 -0.977680
26 -0.428676 1.178009 1.290983 0.772016 1.623629 0.867341
27 -1.196386 -0.495161 0.531435 0.385220 -1.383020 0.795049
28 0.866731 0.412772 1.289048 -0.014727 1.077496 -0.617061
29 1.237442 0.582034 1.478319 -0.565057 0.802127 -0.509015
... ... ... ... ... ... ...
195 -0.603701 0.935409 -0.419589 -0.931752 0.907517 -1.018120
196 1.708997 -0.221303 -1.454810 -0.881563 -0.670232 -0.206469
197 -0.120947 0.962072 -0.135798 1.668524 0.852748 -1.052456
198 1.023034 0.904205 -0.425604 1.452149 1.440979 2.026447
199 1.607139 1.023377 -0.668664 -0.630504 0.512795 1.087292
200 1.108675 0.450827 -1.313634 -0.450630 -1.386651 1.571188
201 0.806904 1.114638 0.335775 0.794859 0.028131 1.383505
202 1.126699 1.174832 -0.674497 0.392635 -1.533629 0.824473
203 -0.345663 -1.373739 1.109525 0.377449 -2.292182 -1.513942
204 1.158812 0.165993 -1.074239 1.512949 2.035898 0.995373
205 1.365396 -0.199409 -0.349417 -2.286552 -1.926263 0.106827
206 0.988164 -1.144285 -0.272909 -0.868581 -2.124224 1.273870
207 0.905347 -0.717727 0.050759 0.376767 -0.593122 0.072830
208 0.749668 1.003858 -0.003342 2.104938 0.631976 0.706610
209 1.094646 0.741328 -0.142645 2.175202 1.046907 1.820650
210 0.893047 0.990481 0.297007 2.573965 0.840532 0.246122
211 2.012667 0.112112 2.554833 0.422693 0.504618 -0.513686
212 0.509494 -0.082608 1.308021 -0.752580 0.394880 -1.613007
213 0.505972 0.224521 1.805274 -0.533802 1.144542 -2.601310
214 1.135681 1.126084 0.303609 1.072045 0.556683 1.593221
215 0.894848 1.251553 0.035370 1.132716 1.078850 -0.087268
216 0.990624 1.229073 0.307033 1.158121 1.115168 -0.568339
217 -1.693745 0.917833 -1.767095 -0.013187 0.902548 -0.580976
218 -1.788316 1.075356 -1.823314 0.354994 1.353021 -0.863177
219 -1.682127 0.743941 -1.291767 0.306169 1.596294 -0.005674
220 0.705066 0.687717 -1.639219 1.045094 -0.345182 3.118994
221 0.520886 1.211743 0.887971 1.035591 0.420222 1.151952
222 0.456500 -1.350072 -0.027792 -0.269632 -1.081328 0.764550
223 -0.648368 0.201861 0.483292 -0.672521 0.434345 -0.365001
224 0.650931 -1.322808 0.580194 0.873078 -0.583450 -0.803746

225 rows × 6 columns

In [358]:
# Elbow method: within-cluster sum of squares (KMeans inertia) for k = 1..14.
# KMeans.fit returns the fitted estimator, so the whole sweep is one expression.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[358]:
[1350.0,
 1124.435731037261,
 969.7111524654798,
 861.1739982448028,
 779.024220761341,
 706.2330662595951,
 649.2198311716684,
 599.2097480238398,
 565.7104737024936,
 525.1010514512898,
 497.51074255236824,
 479.1767694058344,
 448.31267298836974,
 436.24565823492435]
In [359]:
# Elbow plot: look for the "knee" where adding clusters stops paying off.
# plt.show() also suppresses the stray `[<Line2D ...>]` cell output.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow method: WSS vs. number of clusters')
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares')
plt.show()
Out[359]:
[<matplotlib.lines.Line2D at 0x1e8322a9160>]

K=3

In [360]:
# Fit the final model with k = 3 (chosen from the elbow plot above);
# n_init=10 restarts k-means from 10 centroid seeds and keeps the best run.
kmeans_tc = KMeans(n_clusters=3, random_state=0, n_init=10)
kmeans_tc.fit(X)
Out[360]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [361]:
kmeans_tc.labels_
Out[361]:
array([0, 2, 2, 2, 2, 2, 0, 1, 1, 1, 1, 1, 2, 2, 2, 0, 2, 1, 2, 0, 2, 2,
       2, 2, 2, 2, 1, 2, 2, 2, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 0, 0, 2, 2,
       2, 2, 0, 0, 0, 2, 0, 2, 1, 1, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 2, 1,
       0, 0, 0, 2, 2, 2, 1, 0, 2, 2, 2, 2, 0, 0, 1, 1, 0, 0, 2, 0, 0, 0,
       1, 1, 2, 1, 1, 0, 0, 1, 2, 2, 2, 2, 0, 2, 2, 2, 0, 2, 2, 2, 0, 0,
       1, 1, 0, 1, 2, 2, 2, 1, 0, 1, 1, 2, 1, 1, 0, 0, 0, 1, 1, 1, 1, 2,
       2, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 1, 2, 2, 1, 2, 0, 1, 2, 0, 2, 1,
       2, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2,
       2, 0, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 0, 1,
       1, 1, 0, 1, 1, 2, 1, 0, 0, 0, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 2,
       1, 1, 0, 2, 2])
In [362]:
# Cluster assignment for each track. Since predict() is applied to the same
# data the model was fit on, this equals kmeans_tc.labels_ (shown above).
clusters_tc = kmeans_tc.predict(X)
clusters_tc
Out[362]:
array([0, 2, 2, 2, 2, 2, 0, 1, 1, 1, 1, 1, 2, 2, 2, 0, 2, 1, 2, 0, 2, 2,
       2, 2, 2, 2, 1, 2, 2, 2, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 0, 0, 2, 2,
       2, 2, 0, 0, 0, 2, 0, 2, 1, 1, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 2, 1,
       0, 0, 0, 2, 2, 2, 1, 0, 2, 2, 2, 2, 0, 0, 1, 1, 0, 0, 2, 0, 0, 0,
       1, 1, 2, 1, 1, 0, 0, 1, 2, 2, 2, 2, 0, 2, 2, 2, 0, 2, 2, 2, 0, 0,
       1, 1, 0, 1, 2, 2, 2, 1, 0, 1, 1, 2, 1, 1, 0, 0, 0, 1, 1, 1, 1, 2,
       2, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 1, 2, 2, 1, 2, 0, 1, 2, 0, 2, 1,
       2, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2,
       2, 0, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 0, 1,
       1, 1, 0, 1, 1, 2, 1, 0, 0, 0, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 2,
       1, 1, 0, 2, 2])
In [363]:
# NOTE(review): this mutates X in place — the feature matrix now carries two
# non-feature columns ('Cluster', 'chosen'), so X must not be fed back into a
# model after this cell without dropping them first.
X.loc[:,'Cluster'] = clusters_tc
# 'chosen' is the target label; assumes y's row order matches X — TODO confirm.
X.loc[:,'chosen'] = list(y)
In [364]:
X
Out[364]:
tonalcentroidfiles_1 tonalcentroidfiles_2 tonalcentroidfiles_3 tonalcentroidfiles_4 tonalcentroidfiles_5 tonalcentroidfiles_6 Cluster chosen
0 1.221298 -0.131733 -0.299437 -1.504299 -0.981450 0.118501 0 0
1 -0.401407 -1.423353 0.879221 0.761177 -0.141123 -1.312558 2 0
2 -0.336789 -0.749510 0.197427 0.350093 -0.148137 0.191187 2 0
3 -0.785803 0.323072 0.152856 -0.630916 1.431384 -1.260729 2 0
4 -0.879329 0.391910 0.348949 -0.626137 1.401315 -0.417223 2 0
5 -1.122300 0.270282 0.525297 1.043848 -0.500903 0.121388 2 0
6 1.854801 -0.110375 0.714529 -0.774181 0.550772 -0.030796 0 0
7 1.233933 0.235407 0.220324 -0.416248 0.185500 0.429491 1 0
8 0.870319 0.678474 0.499988 1.108991 1.281285 0.734961 1 0
9 0.390986 0.860360 -1.381945 0.173292 -0.891547 -0.692921 1 0
10 0.746873 1.087314 0.138873 0.673665 0.999100 1.072527 1 0
11 0.794479 0.967314 -0.363499 1.122351 0.135802 -0.719195 1 0
12 -1.413209 -0.136961 -0.371584 0.036112 -0.471990 -1.250545 2 0
13 -0.072759 -1.149988 1.952271 0.500676 0.052506 -1.006895 2 0
14 -1.273932 -0.022476 -0.393398 0.422307 -1.130763 -0.797664 2 0
15 -1.343760 -0.913734 0.077520 -0.431201 -1.914695 0.581853 0 0
16 -0.984733 -0.428093 1.220123 -0.685684 1.139966 0.712742 2 0
17 0.280303 1.062737 -0.783356 -0.172690 -0.675275 0.261145 1 0
18 0.000660 1.240654 -0.919850 -0.619755 -0.483109 -0.722220 2 0
19 0.174949 1.113514 -2.625056 -0.681779 -0.763682 0.031420 0 0
20 -0.183363 -1.557702 -0.979966 0.553109 1.161043 -0.644240 2 0
21 -0.171420 -1.728398 -0.653886 -0.361851 0.584439 -0.772678 2 0
22 -0.120605 -1.190295 1.627818 0.127251 -0.575228 0.475040 2 0
23 -1.074141 1.086592 0.969446 0.102117 0.044168 1.096704 2 0
24 -1.016635 1.061833 0.981137 -0.047605 -0.063650 0.778928 2 0
25 -1.093370 1.015536 -0.193033 -0.486717 -0.763605 -0.977680 2 0
26 -0.428676 1.178009 1.290983 0.772016 1.623629 0.867341 1 0
27 -1.196386 -0.495161 0.531435 0.385220 -1.383020 0.795049 2 0
28 0.866731 0.412772 1.289048 -0.014727 1.077496 -0.617061 2 0
29 1.237442 0.582034 1.478319 -0.565057 0.802127 -0.509015 2 0
... ... ... ... ... ... ... ... ...
195 -0.603701 0.935409 -0.419589 -0.931752 0.907517 -1.018120 2 1
196 1.708997 -0.221303 -1.454810 -0.881563 -0.670232 -0.206469 0 1
197 -0.120947 0.962072 -0.135798 1.668524 0.852748 -1.052456 1 1
198 1.023034 0.904205 -0.425604 1.452149 1.440979 2.026447 1 1
199 1.607139 1.023377 -0.668664 -0.630504 0.512795 1.087292 1 1
200 1.108675 0.450827 -1.313634 -0.450630 -1.386651 1.571188 0 1
201 0.806904 1.114638 0.335775 0.794859 0.028131 1.383505 1 1
202 1.126699 1.174832 -0.674497 0.392635 -1.533629 0.824473 1 1
203 -0.345663 -1.373739 1.109525 0.377449 -2.292182 -1.513942 2 1
204 1.158812 0.165993 -1.074239 1.512949 2.035898 0.995373 1 1
205 1.365396 -0.199409 -0.349417 -2.286552 -1.926263 0.106827 0 1
206 0.988164 -1.144285 -0.272909 -0.868581 -2.124224 1.273870 0 1
207 0.905347 -0.717727 0.050759 0.376767 -0.593122 0.072830 0 1
208 0.749668 1.003858 -0.003342 2.104938 0.631976 0.706610 1 1
209 1.094646 0.741328 -0.142645 2.175202 1.046907 1.820650 1 1
210 0.893047 0.990481 0.297007 2.573965 0.840532 0.246122 1 1
211 2.012667 0.112112 2.554833 0.422693 0.504618 -0.513686 1 1
212 0.509494 -0.082608 1.308021 -0.752580 0.394880 -1.613007 2 1
213 0.505972 0.224521 1.805274 -0.533802 1.144542 -2.601310 2 1
214 1.135681 1.126084 0.303609 1.072045 0.556683 1.593221 1 1
215 0.894848 1.251553 0.035370 1.132716 1.078850 -0.087268 1 1
216 0.990624 1.229073 0.307033 1.158121 1.115168 -0.568339 1 1
217 -1.693745 0.917833 -1.767095 -0.013187 0.902548 -0.580976 2 1
218 -1.788316 1.075356 -1.823314 0.354994 1.353021 -0.863177 2 1
219 -1.682127 0.743941 -1.291767 0.306169 1.596294 -0.005674 2 1
220 0.705066 0.687717 -1.639219 1.045094 -0.345182 3.118994 1 1
221 0.520886 1.211743 0.887971 1.035591 0.420222 1.151952 1 1
222 0.456500 -1.350072 -0.027792 -0.269632 -1.081328 0.764550 0 1
223 -0.648368 0.201861 0.483292 -0.672521 0.434345 -0.365001 2 1
224 0.650931 -1.322808 0.580194 0.873078 -0.583450 -0.803746 2 1

225 rows × 8 columns

In [365]:
# Cross-tabulate cluster membership against the 'chosen' label and render it
# as a stacked bar chart: one bar per cluster, split by chosen = 0 / 1.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
# Column `0` (produced by .size().reset_index()) holds the group counts.
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[365]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e831cecf60>

Chromagram

In [404]:
df_n_ps_std[0].columns
Out[404]:
Index(['durationfiles', 'rmsfiles', 'rmsmedianfiles', 'lowenergyfiles',
       'ASRfiles', 'beatspectrumfiles', 'eventdensityfiles', 'tempofiles',
       'pulseclarityfiles', 'zerocrossfiles', 'rolloffsfiles',
       'brightnessfiles', 'spreadfiles', 'centroidfiles', 'kurtosisfiles',
       'flatnessfiles', 'entropyfiles', 'mfccfiles_1', 'mfccfiles_2',
       'mfccfiles_3', 'mfccfiles_4', 'mfccfiles_5', 'mfccfiles_6',
       'mfccfiles_7', 'mfccfiles_8', 'mfccfiles_9', 'mfccfiles_10',
       'mfccfiles_11', 'mfccfiles_12', 'mfccfiles_13', 'inharmonicityfiles',
       'bestkeyfiles', 'keyclarityfiles', 'modalityfiles',
       'tonalcentroidfiles_1', 'tonalcentroidfiles_2', 'tonalcentroidfiles_3',
       'tonalcentroidfiles_4', 'tonalcentroidfiles_5', 'tonalcentroidfiles_6',
       'chromagramfiles_1', 'chromagramfiles_2', 'chromagramfiles_3',
       'chromagramfiles_4', 'chromagramfiles_5', 'chromagramfiles_6',
       'chromagramfiles_7', 'chromagramfiles_8', 'chromagramfiles_9',
       'chromagramfiles_10', 'chromagramfiles_11', 'chromagramfiles_12',
       'attackslopefiles', 'attackleapfiles', 'chosen'],
      dtype='object')
In [405]:
df_n_ps_std[0].columns[40:52]
Out[405]:
Index(['chromagramfiles_1', 'chromagramfiles_2', 'chromagramfiles_3',
       'chromagramfiles_4', 'chromagramfiles_5', 'chromagramfiles_6',
       'chromagramfiles_7', 'chromagramfiles_8', 'chromagramfiles_9',
       'chromagramfiles_10', 'chromagramfiles_11', 'chromagramfiles_12'],
      dtype='object')
In [406]:
# Extract the 12 chromagram columns (positions 40:52, per the column listing
# above) of each company's standardized frame into its own DataFrame.
df_n_ps_std_ch = [None]*len(companies)
for i in range(len(companies)):
    # .iloc already preserves the column labels, so the original's explicit
    # `.columns = ...` reassignment was a no-op; .copy() decouples the slice
    # from its parent frame (same effect as the pd.DataFrame(...) wrap).
    df_n_ps_std_ch[i] = df_n_ps_std[i].iloc[:, 40:52].copy()
df_n_ps_std_ch[0].info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 372 entries, 0 to 371
Data columns (total 12 columns):
chromagramfiles_1     372 non-null float64
chromagramfiles_2     372 non-null float64
chromagramfiles_3     372 non-null float64
chromagramfiles_4     372 non-null float64
chromagramfiles_5     372 non-null float64
chromagramfiles_6     372 non-null float64
chromagramfiles_7     372 non-null float64
chromagramfiles_8     372 non-null float64
chromagramfiles_9     372 non-null float64
chromagramfiles_10    372 non-null float64
chromagramfiles_11    372 non-null float64
chromagramfiles_12    372 non-null float64
dtypes: float64(12)
memory usage: 35.0 KB

Arte Francés

ANN

In [407]:
# Features: the 12 standardized chromagram columns for company 0
# (per the "Arte Francés" heading above).
X = df_n_ps_std_ch[0]
In [408]:
# Target: the binary 'chosen' flag for the same company.
y = df_n_ps[0]['chosen']
In [409]:
# Hold out the default 25% as a test split; random_state pins the split so the
# notebook is reproducible under Restart & Run All (the original call had no
# seed, so every re-run produced a different train/test partition).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
In [410]:
X_train.shape
Out[410]:
(279, 12)
In [24]:
# Base estimator for the grid search; hidden_layer_sizes here is only the
# starting value — the search below tunes it along with the other parameters.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [25]:
# Candidate values for the MLP hyper-parameter search below.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# batch_size_vec is defined but excluded from the grid (see the cell below)
# to keep the search tractable.
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [26]:
import time
start = time.time() # current time in seconds since Jan 1, 1970 (epoch reference)

np.random.seed(1234)
# Search space for GridSearchCV; batch_size is commented out to keep the
# search tractable (it would multiply the grid by 7).
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Refit on accuracy, but also record Cohen's kappa for every candidate.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` parameter was deprecated in scikit-learn 0.22 and
# removed in 0.24 — this call only runs on older sklearn versions.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [27]:
# Run the grid search and report the best configuration. This is the slow
# cell (~24 minutes per the output below).
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # time after the model-selection run finishes
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (30, 30, 30), 'learning_rate_init': 0.008, 'max_iter': 100}, que permiten obtener un Accuracy de 78.49% y un Kappa del 30.33
Tiempo total: 23.83 minutos
In [411]:
# Network topology for the Keras re-implementation: input width taken from the
# data, hidden widths from the (manually overridden) grid-search result, plus
# a single sigmoid output unit.
n0 = X_train.shape[1]
grid.best_params_['hidden_layer_sizes'] = [30, 30, 30]
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = 0.008
epochs = 100
In [412]:
# Functional-API entry point sized to the n0 input features.
input_tensor = Input(shape = (n0,))
In [413]:
# Chain the hidden Dense(tanh) layers functionally, keeping every intermediate
# tensor; the last one feeds the single-unit sigmoid classification head.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation = 'tanh')(hidden_outputs[-1]))

classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [414]:
# Assemble the model and snapshot its freshly-initialized weights so training
# can later be restarted from the exact same starting point (see set_weights below).
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [415]:
model.summary()
Model: "model_21"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_21 (InputLayer)        (None, 12)                0         
_________________________________________________________________
dense_66 (Dense)             (None, 30)                390       
_________________________________________________________________
dense_67 (Dense)             (None, 30)                930       
_________________________________________________________________
dense_68 (Dense)             (None, 30)                930       
_________________________________________________________________
dense_69 (Dense)             (None, 1)                 31        
=================================================================
Total params: 2,281
Trainable params: 2,281
Non-trainable params: 0
_________________________________________________________________
In [416]:
# Restore the saved initial weights, then train with Adam at the grid-searched
# learning rate. ReduceLROnPlateau halves the LR whenever val_accuracy fails
# to improve by at least 0.01 over 10 consecutive epochs.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 279 samples, validate on 93 samples
Epoch 1/100
279/279 [==============================] - 0s 792us/step - loss: 0.6389 - accuracy: 0.6452 - val_loss: 0.5521 - val_accuracy: 0.6882
Epoch 2/100
279/279 [==============================] - 0s 68us/step - loss: 0.5409 - accuracy: 0.7384 - val_loss: 0.5195 - val_accuracy: 0.7742
Epoch 3/100
279/279 [==============================] - 0s 54us/step - loss: 0.5159 - accuracy: 0.7634 - val_loss: 0.5163 - val_accuracy: 0.7742
Epoch 4/100
279/279 [==============================] - 0s 61us/step - loss: 0.4912 - accuracy: 0.7634 - val_loss: 0.5340 - val_accuracy: 0.7849
Epoch 5/100
279/279 [==============================] - 0s 57us/step - loss: 0.4777 - accuracy: 0.7706 - val_loss: 0.5239 - val_accuracy: 0.7634
Epoch 6/100
279/279 [==============================] - 0s 57us/step - loss: 0.4586 - accuracy: 0.7742 - val_loss: 0.5071 - val_accuracy: 0.7634
Epoch 7/100
279/279 [==============================] - 0s 57us/step - loss: 0.4379 - accuracy: 0.7885 - val_loss: 0.5330 - val_accuracy: 0.7742
Epoch 8/100
279/279 [==============================] - 0s 57us/step - loss: 0.4370 - accuracy: 0.7993 - val_loss: 0.5240 - val_accuracy: 0.7849
Epoch 9/100
279/279 [==============================] - 0s 57us/step - loss: 0.3991 - accuracy: 0.8208 - val_loss: 0.4966 - val_accuracy: 0.7849
Epoch 10/100
279/279 [==============================] - 0s 54us/step - loss: 0.3763 - accuracy: 0.8459 - val_loss: 0.5227 - val_accuracy: 0.7742
Epoch 11/100
279/279 [==============================] - 0s 61us/step - loss: 0.3536 - accuracy: 0.8674 - val_loss: 0.5219 - val_accuracy: 0.7849
Epoch 12/100
279/279 [==============================] - 0s 57us/step - loss: 0.3131 - accuracy: 0.8781 - val_loss: 0.5182 - val_accuracy: 0.7527
Epoch 13/100
279/279 [==============================] - 0s 61us/step - loss: 0.2881 - accuracy: 0.8817 - val_loss: 0.5220 - val_accuracy: 0.7849
Epoch 14/100
279/279 [==============================] - 0s 57us/step - loss: 0.2619 - accuracy: 0.8961 - val_loss: 0.5317 - val_accuracy: 0.7742

Epoch 00014: ReduceLROnPlateau reducing learning rate to 0.004000000189989805.
Epoch 15/100
279/279 [==============================] - 0s 61us/step - loss: 0.2267 - accuracy: 0.9211 - val_loss: 0.5444 - val_accuracy: 0.7742
Epoch 16/100
279/279 [==============================] - 0s 57us/step - loss: 0.2121 - accuracy: 0.9247 - val_loss: 0.5642 - val_accuracy: 0.7742
Epoch 17/100
279/279 [==============================] - 0s 57us/step - loss: 0.1956 - accuracy: 0.9319 - val_loss: 0.5427 - val_accuracy: 0.7742
Epoch 18/100
279/279 [==============================] - 0s 61us/step - loss: 0.1754 - accuracy: 0.9427 - val_loss: 0.5455 - val_accuracy: 0.7849
Epoch 19/100
279/279 [==============================] - 0s 57us/step - loss: 0.1654 - accuracy: 0.9570 - val_loss: 0.5611 - val_accuracy: 0.7957
Epoch 20/100
279/279 [==============================] - 0s 64us/step - loss: 0.1513 - accuracy: 0.9642 - val_loss: 0.6203 - val_accuracy: 0.7742
Epoch 21/100
279/279 [==============================] - 0s 57us/step - loss: 0.1370 - accuracy: 0.9642 - val_loss: 0.5838 - val_accuracy: 0.7849
Epoch 22/100
279/279 [==============================] - 0s 57us/step - loss: 0.1285 - accuracy: 0.9642 - val_loss: 0.5995 - val_accuracy: 0.7849
Epoch 23/100
279/279 [==============================] - 0s 68us/step - loss: 0.1139 - accuracy: 0.9785 - val_loss: 0.6266 - val_accuracy: 0.7849
Epoch 24/100
279/279 [==============================] - 0s 75us/step - loss: 0.1090 - accuracy: 0.9857 - val_loss: 0.6823 - val_accuracy: 0.7634
Epoch 25/100
279/279 [==============================] - 0s 61us/step - loss: 0.1047 - accuracy: 0.9785 - val_loss: 0.6614 - val_accuracy: 0.7527
Epoch 26/100
279/279 [==============================] - 0s 90us/step - loss: 0.0932 - accuracy: 0.9821 - val_loss: 0.7107 - val_accuracy: 0.7527
Epoch 27/100
279/279 [==============================] - 0s 64us/step - loss: 0.0814 - accuracy: 0.9928 - val_loss: 0.6746 - val_accuracy: 0.7742
Epoch 28/100
279/279 [==============================] - 0s 64us/step - loss: 0.0705 - accuracy: 0.9892 - val_loss: 0.7344 - val_accuracy: 0.7634
Epoch 29/100
279/279 [==============================] - 0s 82us/step - loss: 0.0669 - accuracy: 0.9928 - val_loss: 0.7636 - val_accuracy: 0.7527

Epoch 00029: ReduceLROnPlateau reducing learning rate to 0.0020000000949949026.
Epoch 30/100
279/279 [==============================] - 0s 64us/step - loss: 0.0597 - accuracy: 0.9964 - val_loss: 0.7633 - val_accuracy: 0.7634
Epoch 31/100
279/279 [==============================] - 0s 64us/step - loss: 0.0557 - accuracy: 0.9964 - val_loss: 0.7586 - val_accuracy: 0.7742
Epoch 32/100
279/279 [==============================] - 0s 57us/step - loss: 0.0525 - accuracy: 0.9964 - val_loss: 0.7917 - val_accuracy: 0.7634
Epoch 33/100
279/279 [==============================] - 0s 82us/step - loss: 0.0516 - accuracy: 0.9892 - val_loss: 0.7975 - val_accuracy: 0.7849
Epoch 34/100
279/279 [==============================] - 0s 97us/step - loss: 0.0449 - accuracy: 0.9964 - val_loss: 0.7797 - val_accuracy: 0.7634
Epoch 35/100
279/279 [==============================] - 0s 90us/step - loss: 0.0449 - accuracy: 0.9964 - val_loss: 0.8031 - val_accuracy: 0.7742
Epoch 36/100
279/279 [==============================] - 0s 79us/step - loss: 0.0424 - accuracy: 0.9964 - val_loss: 0.8336 - val_accuracy: 0.7742
Epoch 37/100
279/279 [==============================] - 0s 64us/step - loss: 0.0408 - accuracy: 0.9964 - val_loss: 0.7955 - val_accuracy: 0.7742
Epoch 38/100
279/279 [==============================] - 0s 61us/step - loss: 0.0399 - accuracy: 0.9964 - val_loss: 0.8462 - val_accuracy: 0.7742
Epoch 39/100
279/279 [==============================] - 0s 61us/step - loss: 0.0343 - accuracy: 0.9964 - val_loss: 0.8379 - val_accuracy: 0.7742

Epoch 00039: ReduceLROnPlateau reducing learning rate to 0.0010000000474974513.
Epoch 40/100
279/279 [==============================] - 0s 57us/step - loss: 0.0333 - accuracy: 0.9964 - val_loss: 0.8493 - val_accuracy: 0.7849
Epoch 41/100
279/279 [==============================] - 0s 57us/step - loss: 0.0318 - accuracy: 0.9964 - val_loss: 0.8589 - val_accuracy: 0.7849
Epoch 42/100
279/279 [==============================] - 0s 57us/step - loss: 0.0307 - accuracy: 0.9964 - val_loss: 0.8626 - val_accuracy: 0.7742
Epoch 43/100
279/279 [==============================] - 0s 61us/step - loss: 0.0296 - accuracy: 0.9964 - val_loss: 0.8739 - val_accuracy: 0.7634
Epoch 44/100
279/279 [==============================] - 0s 57us/step - loss: 0.0290 - accuracy: 0.9964 - val_loss: 0.8782 - val_accuracy: 0.7742
Epoch 45/100
279/279 [==============================] - 0s 57us/step - loss: 0.0282 - accuracy: 0.9964 - val_loss: 0.8787 - val_accuracy: 0.7634
Epoch 46/100
279/279 [==============================] - 0s 64us/step - loss: 0.0273 - accuracy: 0.9964 - val_loss: 0.8910 - val_accuracy: 0.7742
Epoch 47/100
279/279 [==============================] - 0s 61us/step - loss: 0.0270 - accuracy: 0.9964 - val_loss: 0.8854 - val_accuracy: 0.7742
Epoch 48/100
279/279 [==============================] - 0s 100us/step - loss: 0.0262 - accuracy: 0.9964 - val_loss: 0.8989 - val_accuracy: 0.7634
Epoch 49/100
279/279 [==============================] - 0s 68us/step - loss: 0.0249 - accuracy: 0.9964 - val_loss: 0.9010 - val_accuracy: 0.7742

Epoch 00049: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 50/100
279/279 [==============================] - 0s 64us/step - loss: 0.0243 - accuracy: 0.9964 - val_loss: 0.9060 - val_accuracy: 0.7742
Epoch 51/100
279/279 [==============================] - 0s 61us/step - loss: 0.0239 - accuracy: 0.9964 - val_loss: 0.9030 - val_accuracy: 0.7634
Epoch 52/100
279/279 [==============================] - 0s 61us/step - loss: 0.0236 - accuracy: 0.9964 - val_loss: 0.9014 - val_accuracy: 0.7634
Epoch 53/100
279/279 [==============================] - 0s 72us/step - loss: 0.0230 - accuracy: 0.9964 - val_loss: 0.9147 - val_accuracy: 0.7742
Epoch 54/100
279/279 [==============================] - 0s 61us/step - loss: 0.0228 - accuracy: 0.9964 - val_loss: 0.9206 - val_accuracy: 0.7742
Epoch 55/100
279/279 [==============================] - 0s 61us/step - loss: 0.0223 - accuracy: 0.9964 - val_loss: 0.9178 - val_accuracy: 0.7742
Epoch 56/100
279/279 [==============================] - 0s 61us/step - loss: 0.0218 - accuracy: 0.9964 - val_loss: 0.9186 - val_accuracy: 0.7742
Epoch 57/100
279/279 [==============================] - 0s 57us/step - loss: 0.0216 - accuracy: 0.9964 - val_loss: 0.9237 - val_accuracy: 0.7742
Epoch 58/100
279/279 [==============================] - 0s 82us/step - loss: 0.0215 - accuracy: 1.0000 - val_loss: 0.9231 - val_accuracy: 0.7742
Epoch 59/100
279/279 [==============================] - 0s 68us/step - loss: 0.0210 - accuracy: 1.0000 - val_loss: 0.9316 - val_accuracy: 0.7742

Epoch 00059: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 60/100
279/279 [==============================] - 0s 57us/step - loss: 0.0205 - accuracy: 1.0000 - val_loss: 0.9325 - val_accuracy: 0.7742
Epoch 61/100
279/279 [==============================] - 0s 61us/step - loss: 0.0204 - accuracy: 1.0000 - val_loss: 0.9334 - val_accuracy: 0.7634
Epoch 62/100
279/279 [==============================] - 0s 57us/step - loss: 0.0203 - accuracy: 1.0000 - val_loss: 0.9332 - val_accuracy: 0.7634
Epoch 63/100
279/279 [==============================] - 0s 61us/step - loss: 0.0201 - accuracy: 1.0000 - val_loss: 0.9352 - val_accuracy: 0.7634
Epoch 64/100
279/279 [==============================] - 0s 61us/step - loss: 0.0199 - accuracy: 1.0000 - val_loss: 0.9350 - val_accuracy: 0.7634
Epoch 65/100
279/279 [==============================] - 0s 61us/step - loss: 0.0199 - accuracy: 1.0000 - val_loss: 0.9396 - val_accuracy: 0.7634
Epoch 66/100
279/279 [==============================] - 0s 61us/step - loss: 0.0197 - accuracy: 1.0000 - val_loss: 0.9378 - val_accuracy: 0.7634
Epoch 67/100
279/279 [==============================] - 0s 68us/step - loss: 0.0195 - accuracy: 1.0000 - val_loss: 0.9396 - val_accuracy: 0.7634
Epoch 68/100
279/279 [==============================] - 0s 86us/step - loss: 0.0193 - accuracy: 1.0000 - val_loss: 0.9423 - val_accuracy: 0.7634
Epoch 69/100
279/279 [==============================] - 0s 57us/step - loss: 0.0192 - accuracy: 1.0000 - val_loss: 0.9436 - val_accuracy: 0.7634

Epoch 00069: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 70/100
279/279 [==============================] - 0s 64us/step - loss: 0.0190 - accuracy: 1.0000 - val_loss: 0.9444 - val_accuracy: 0.7634
Epoch 71/100
279/279 [==============================] - 0s 57us/step - loss: 0.0189 - accuracy: 1.0000 - val_loss: 0.9445 - val_accuracy: 0.7634
Epoch 72/100
279/279 [==============================] - 0s 57us/step - loss: 0.0188 - accuracy: 1.0000 - val_loss: 0.9453 - val_accuracy: 0.7634
Epoch 73/100
279/279 [==============================] - 0s 61us/step - loss: 0.0188 - accuracy: 1.0000 - val_loss: 0.9446 - val_accuracy: 0.7634
Epoch 74/100
279/279 [==============================] - 0s 57us/step - loss: 0.0187 - accuracy: 1.0000 - val_loss: 0.9460 - val_accuracy: 0.7634
Epoch 75/100
279/279 [==============================] - 0s 61us/step - loss: 0.0186 - accuracy: 1.0000 - val_loss: 0.9471 - val_accuracy: 0.7634
Epoch 76/100
279/279 [==============================] - 0s 61us/step - loss: 0.0185 - accuracy: 1.0000 - val_loss: 0.9490 - val_accuracy: 0.7634
Epoch 77/100
279/279 [==============================] - 0s 64us/step - loss: 0.0184 - accuracy: 1.0000 - val_loss: 0.9488 - val_accuracy: 0.7634
Epoch 78/100
279/279 [==============================] - 0s 57us/step - loss: 0.0184 - accuracy: 1.0000 - val_loss: 0.9503 - val_accuracy: 0.7634
Epoch 79/100
279/279 [==============================] - 0s 61us/step - loss: 0.0183 - accuracy: 1.0000 - val_loss: 0.9509 - val_accuracy: 0.7634

Epoch 00079: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 80/100
279/279 [==============================] - 0s 68us/step - loss: 0.0182 - accuracy: 1.0000 - val_loss: 0.9515 - val_accuracy: 0.7634
Epoch 81/100
279/279 [==============================] - 0s 61us/step - loss: 0.0181 - accuracy: 1.0000 - val_loss: 0.9519 - val_accuracy: 0.7634
Epoch 82/100
279/279 [==============================] - 0s 64us/step - loss: 0.0181 - accuracy: 1.0000 - val_loss: 0.9524 - val_accuracy: 0.7634
Epoch 83/100
279/279 [==============================] - 0s 90us/step - loss: 0.0181 - accuracy: 1.0000 - val_loss: 0.9528 - val_accuracy: 0.7634
Epoch 84/100
279/279 [==============================] - 0s 90us/step - loss: 0.0180 - accuracy: 1.0000 - val_loss: 0.9532 - val_accuracy: 0.7634
Epoch 85/100
279/279 [==============================] - 0s 90us/step - loss: 0.0180 - accuracy: 1.0000 - val_loss: 0.9536 - val_accuracy: 0.7634
Epoch 86/100
279/279 [==============================] - 0s 93us/step - loss: 0.0180 - accuracy: 1.0000 - val_loss: 0.9537 - val_accuracy: 0.7634
Epoch 87/100
279/279 [==============================] - 0s 86us/step - loss: 0.0179 - accuracy: 1.0000 - val_loss: 0.9538 - val_accuracy: 0.7634
Epoch 88/100
279/279 [==============================] - 0s 64us/step - loss: 0.0179 - accuracy: 1.0000 - val_loss: 0.9538 - val_accuracy: 0.7634
Epoch 89/100
279/279 [==============================] - 0s 61us/step - loss: 0.0178 - accuracy: 1.0000 - val_loss: 0.9535 - val_accuracy: 0.7634

Epoch 00089: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 90/100
279/279 [==============================] - 0s 61us/step - loss: 0.0178 - accuracy: 1.0000 - val_loss: 0.9540 - val_accuracy: 0.7634
Epoch 91/100
279/279 [==============================] - 0s 68us/step - loss: 0.0178 - accuracy: 1.0000 - val_loss: 0.9542 - val_accuracy: 0.7634
Epoch 92/100
279/279 [==============================] - 0s 54us/step - loss: 0.0177 - accuracy: 1.0000 - val_loss: 0.9544 - val_accuracy: 0.7634
Epoch 93/100
279/279 [==============================] - 0s 61us/step - loss: 0.0177 - accuracy: 1.0000 - val_loss: 0.9549 - val_accuracy: 0.7634
Epoch 94/100
279/279 [==============================] - 0s 64us/step - loss: 0.0177 - accuracy: 1.0000 - val_loss: 0.9548 - val_accuracy: 0.7634
Epoch 95/100
279/279 [==============================] - 0s 57us/step - loss: 0.0177 - accuracy: 1.0000 - val_loss: 0.9552 - val_accuracy: 0.7634
Epoch 96/100
279/279 [==============================] - 0s 57us/step - loss: 0.0177 - accuracy: 1.0000 - val_loss: 0.9556 - val_accuracy: 0.7634
Epoch 97/100
279/279 [==============================] - 0s 61us/step - loss: 0.0176 - accuracy: 1.0000 - val_loss: 0.9558 - val_accuracy: 0.7634
Epoch 98/100
279/279 [==============================] - 0s 57us/step - loss: 0.0176 - accuracy: 1.0000 - val_loss: 0.9560 - val_accuracy: 0.7634
Epoch 99/100
279/279 [==============================] - 0s 64us/step - loss: 0.0176 - accuracy: 1.0000 - val_loss: 0.9567 - val_accuracy: 0.7634

Epoch 00099: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05.
Epoch 100/100
279/279 [==============================] - 0s 68us/step - loss: 0.0176 - accuracy: 1.0000 - val_loss: 0.9567 - val_accuracy: 0.7634
In [417]:
# Learning curves for the fitted Keras model (accuracy and loss,
# training vs. validation, one figure each).
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))

# Training metric as dots, validation metric as a solid line.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 100)
In [418]:
# Score the trained network on the held-out test split.
eval_results = model.evaluate(X_test, y_test)
test_loss, test_acc = eval_results
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
93/93 [==============================] - 0s 43us/step
test loss: 0.9567250449170348, test accuracy: 0.7634408473968506
In [419]:
# Ranking quality of the raw predicted probabilities on the test split.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.7076719576719577
In [420]:
# Threshold the probabilities at 0.5 to get hard 0/1 labels, then
# measure chance-corrected agreement with the true labels.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  0.18615751789976132

KMeans

In [39]:
X
Out[39]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12
0 1.709998 0.081666 2.128489 0.497958 2.506539 1.564462 -0.544626 -0.212915 -0.703695 0.450360 -0.955214 0.070111
1 1.306748 0.425280 1.373850 -0.510593 -0.188549 0.109582 0.202407 1.738301 -0.130093 0.581343 -0.188241 0.261952
2 1.734670 -1.372888 -1.154236 0.358473 -1.159185 -1.272362 -1.103368 1.630111 -1.333754 -1.332400 -0.823796 -1.678793
3 1.734670 -1.064259 -0.709050 0.536782 -0.829959 -1.054348 -0.888469 1.121852 -0.885262 -0.799099 -1.043935 -1.223713
4 1.734670 1.475232 0.304731 1.153720 1.154918 -0.480466 0.141495 0.952700 0.366988 -0.320424 -0.249684 -0.286209
5 -1.357454 -1.560365 -0.993389 2.575326 -0.840510 -0.885550 1.098423 0.910833 -1.901272 -1.828920 -1.282786 -1.273477
6 0.321543 -0.501364 -0.339054 0.091827 -0.326838 0.016241 -0.078257 0.113313 1.271259 -0.267013 0.231504 1.610523
7 1.097999 -0.427045 -0.234103 1.592537 -0.309903 2.367725 -0.109067 1.728224 0.757555 1.272787 1.334116 1.609735
8 1.734670 0.468400 -0.344386 -0.090068 -0.397143 1.133967 -0.709333 -0.262112 -0.367753 0.321938 0.506700 2.032213
9 -1.359746 -1.205127 -0.671317 -0.932419 -0.674974 -1.006055 -0.652429 0.407833 -0.706174 0.460005 1.361925 -0.543693
10 1.587306 0.209036 1.657840 0.476532 1.214490 1.589653 0.700434 0.894283 0.451061 0.995991 1.361926 1.007772
11 1.734670 0.104209 -0.231131 -0.245206 -0.310258 -0.256497 -0.023748 0.747830 -0.408037 -0.453141 -0.079052 1.036713
12 1.035685 0.546933 0.622772 -0.701164 -0.677402 -1.056329 -0.597825 -1.035795 1.271259 0.922609 0.604807 1.083653
13 0.972281 0.789016 0.702993 0.179892 -0.398625 -0.659406 -0.324016 0.383199 1.271259 0.519070 -0.328565 1.268306
14 -1.133199 0.756391 -0.892669 -1.306858 -1.039211 -0.140379 -1.019984 -0.954278 -0.606554 -0.698781 1.361926 -0.845614
15 -0.286686 -0.893181 -0.864608 -1.025582 -1.145054 -1.306954 -0.907368 1.339780 0.096326 0.053290 1.361926 -0.418235
16 -1.078617 -1.219578 -0.935650 -0.892424 -1.324305 -0.677583 -1.147006 -0.197405 -1.202819 -1.032993 1.361926 -0.456323
17 -0.779483 -0.945540 -0.338284 -0.640770 2.494550 1.423590 2.049647 -1.134775 -0.280816 -0.841355 -0.559543 2.119588
18 -0.539003 0.776852 0.530745 -0.519374 2.508300 1.650614 3.324739 -0.880955 0.666814 -0.537766 -0.411351 1.953562
19 -0.654289 -0.841544 0.162215 -0.248271 1.621296 1.601864 3.324739 -1.014659 -0.291794 -0.730857 -0.463545 2.105096
20 0.213529 1.051570 2.128489 2.214653 -1.312378 2.005231 -1.146344 -1.733450 -1.671687 -1.708415 1.033382 -1.042990
21 -1.008970 0.227216 -0.685589 -0.789964 -1.174654 -0.198868 -1.080110 -1.082456 0.645207 -1.226921 1.361926 -0.957975
22 -0.509770 -0.100539 -0.507952 -0.571581 -1.187009 0.856048 -0.968467 -1.269919 1.271259 -0.525888 1.209173 -0.582980
23 -0.434214 -1.542384 0.834388 -1.429739 -1.306939 1.072244 -1.147365 -1.310384 -2.099689 0.794263 1.361926 -1.700448
24 1.024011 -0.133360 2.128488 -0.497823 -0.578522 0.203487 0.200158 1.499097 0.406494 1.107965 -0.366218 0.290605
25 0.996972 -0.068300 0.463479 -0.462229 -0.557242 -0.488353 0.319927 -0.745740 0.672891 1.272786 -0.392191 0.156034
26 1.734670 -0.636382 0.769493 -0.963728 -0.858496 0.118250 -0.164546 -0.383110 -0.373161 0.540783 0.484812 -0.694416
27 0.939519 -0.546060 1.545261 -0.614695 -0.350160 0.761016 -0.827209 -0.140902 -1.236174 1.272787 0.685149 -0.743315
28 0.065764 -1.072686 1.002347 0.205191 0.149629 0.960761 -0.441845 1.071626 -0.641837 1.004214 1.361926 -0.922358
29 1.734670 -1.368860 0.533780 -0.644543 -1.328265 0.179244 -1.144397 -1.100556 -2.100911 -0.305756 -0.595463 -1.054706
... ... ... ... ... ... ... ... ... ... ... ... ...
342 -0.855930 0.826496 -0.453379 -0.949219 0.299850 -1.090327 0.918687 -1.108240 -0.838253 -0.500788 -0.690370 2.119588
343 -1.395065 -0.085953 1.922929 -0.223554 1.095776 -0.911609 -0.062340 1.738301 -1.726601 -0.869379 -1.690911 0.185932
344 -0.298021 -1.181741 -0.986984 -1.060049 -0.912388 -1.297902 -1.142678 -1.388649 -0.921552 1.272787 -1.272055 -1.334409
345 -1.545817 -1.409239 -1.442796 -1.421649 -0.183976 -1.246279 -1.097348 -1.266514 -1.348236 1.272787 -1.481572 -1.042455
346 -0.346415 -1.039454 -1.018195 -0.914582 -0.378597 -1.221697 -1.136525 -0.552667 -0.552626 1.272787 -1.056006 -1.074637
347 -1.474386 0.029941 -0.538026 -0.402193 2.847023 1.102542 2.185053 -1.039952 -2.054190 -1.784163 -1.788691 -0.164281
348 0.118230 0.491129 -1.420467 1.101952 -1.224983 2.813714 1.536334 -1.700918 0.266362 -2.009383 0.865783 -1.676405
349 0.348396 0.059205 -0.665990 2.575326 -0.216770 1.953864 0.561548 -1.182548 0.195642 -1.279866 0.455845 -0.516320
350 1.734670 -0.003146 1.568996 -0.434837 0.521805 0.570604 0.538883 1.192315 -0.104273 0.910080 -0.509533 0.299932
351 -0.099944 -0.751191 2.128489 -1.036671 0.343684 1.099881 -0.554111 0.079251 -1.057160 1.199793 -1.225953 -0.635720
352 1.084858 -0.145339 0.813709 -0.936272 0.827416 1.989130 -0.287384 -0.189383 -0.826764 1.272787 -0.984655 -0.043832
353 1.187787 -0.429263 -0.071507 0.608020 0.731972 1.256808 -0.000437 0.609195 1.271259 0.812543 0.861217 1.341364
354 0.816380 -0.786929 -0.447031 -0.170697 0.510269 0.818066 1.142880 0.163693 0.937487 1.093106 1.361926 1.303202
355 1.118179 -0.721368 -0.381566 0.060659 0.909982 1.044335 1.603314 0.291249 0.641018 1.101106 1.361926 1.616319
356 1.226095 0.286296 -0.087024 -0.242071 -0.449252 -0.446803 -0.671768 -0.078598 1.271259 0.835145 0.775448 0.595847
357 1.613198 0.199015 0.291700 -0.240822 0.410003 0.405486 -0.473827 0.672002 0.944358 1.272787 0.606300 0.775759
358 0.890203 -0.190496 0.624245 -0.555802 0.066592 -0.255891 -0.711912 0.103160 0.368297 1.272787 -0.020119 0.448029
359 -1.618544 2.294335 -0.896404 1.636479 0.891626 -1.405106 -0.282123 -1.727463 0.411358 -1.394589 -1.825966 0.156317
360 -1.152545 1.039906 -0.408095 -0.456317 -0.531367 -1.073809 -0.666121 -1.066545 1.271259 -0.911866 -1.381364 -1.175728
361 -0.667401 2.294335 1.170804 0.615303 1.057413 -0.853776 1.839336 -1.553629 0.257124 -0.175382 -1.194146 -0.159446
362 -1.723669 0.092346 -0.153795 -1.289641 -0.293218 -1.396202 3.324739 -0.906893 -1.978969 -0.337704 -1.827015 0.302210
363 -1.723669 -0.253829 1.824256 -1.430713 0.052559 -1.405490 2.236827 0.207454 -2.097736 1.272787 -1.501135 1.113279
364 -1.723669 -0.295522 0.733720 -1.430713 -0.944463 -1.405490 2.817768 -1.650910 -2.100911 1.272786 -1.827015 -0.821891
365 -0.736785 -0.662679 2.128489 -0.285797 0.766661 0.905055 0.092249 0.094889 -1.072136 0.646308 -0.309608 -0.624335
366 0.261615 -0.713681 2.128489 -0.645248 0.263270 1.334024 -0.452231 1.223459 -1.251130 0.300478 0.115550 -0.729146
367 -0.720122 -0.373744 1.948953 0.317220 1.674285 1.748469 0.387958 0.782162 -0.402149 1.272787 0.811932 -0.459207
368 -0.090607 0.425445 1.059996 0.031889 0.100842 0.641548 0.277516 1.419713 1.271259 0.704201 0.112480 0.417123
369 1.263139 0.315568 2.128489 0.016454 1.564635 1.785525 0.652075 1.086427 0.193496 0.271812 -0.412142 0.549110
370 -1.393336 0.445276 -0.245554 2.057913 1.071731 -0.067700 0.357666 1.738300 1.036275 0.316255 0.704801 -0.474007
371 0.356389 -0.292506 -0.400481 -0.103549 -0.758012 1.042100 -0.936500 1.738300 0.568841 0.239851 -0.233628 0.122538

372 rows × 12 columns

In [40]:
# Elbow method: within-cluster sum of squares (KMeans inertia_) for k = 1..14.
# `fit` returns the estimator, so the chain reads cleanly in one expression.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[40]:
[4464.0,
 3806.9754284522382,
 3354.365999848692,
 3107.5256199539926,
 2946.6963010094414,
 2816.3317870211076,
 2686.415552171973,
 2576.5543366887405,
 2504.325344232164,
 2391.4415220727014,
 2324.241850993652,
 2257.160251494757,
 2174.641140049278,
 2102.896301963741]
In [41]:
# Elbow plot: pick the k where adding clusters stops reducing inertia much.
# plt.show() suppresses the stray [<Line2D>] repr the bare call left in the output.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.title('Elbow method')
plt.show()
Out[41]:
[<matplotlib.lines.Line2D at 0x1ef5e7ceda0>]

K = 3, chosen from the elbow in the plot above

In [42]:
# Final clustering with k = 3; fit returns the estimator, which the cell
# then displays (same repr as before).
kmeans_ch = KMeans(n_clusters=3, random_state=0, n_init=10).fit(X)
kmeans_ch
Out[42]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [43]:
# Cluster id (0-2) assigned to each of the 372 rows, in row order.
kmeans_ch.labels_
Out[43]:
array([1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 2, 2, 2, 0, 0,
       0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 2, 0, 0, 2, 0, 2, 1, 1, 1, 0,
       0, 1, 0, 0, 0, 0, 1, 1, 2, 2, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1,
       1, 0, 0, 2, 2, 2, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0,
       1, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 1, 0, 1, 1, 2, 1, 1, 0, 1, 1, 1,
       2, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 2, 0, 2, 2, 2, 1, 0, 1,
       0, 0, 0, 2, 2, 1, 0, 1, 0, 2, 2, 2, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1,
       1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 2, 0, 0, 2, 0, 2, 1, 1, 0, 0, 2,
       1, 2, 1, 2, 2, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0,
       1, 0, 0, 0, 1, 2, 2, 2, 1, 1, 0, 0, 1, 1, 1, 2, 0, 0, 0, 0, 0, 2,
       0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0,
       0, 0, 0, 0, 1, 0, 1, 1, 1, 2, 0, 0, 0, 0, 0, 1, 0, 2, 0, 0, 0, 1,
       2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 0, 0, 0, 1, 1, 2, 2, 0, 1, 1, 0, 0,
       0, 0, 1, 1, 1, 1, 1, 2, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 2, 2, 0,
       1, 1, 0, 1, 1, 1, 0, 1, 2, 0, 0, 0, 0, 0, 1, 2, 2, 0, 2, 0, 1, 0,
       1, 0, 1, 0, 0, 1, 2, 2, 2, 2, 1, 2, 2, 2, 0, 0, 0, 2, 0, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 2, 0, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1])
In [44]:
# Predict on the training data itself; the result matches
# `kmeans_ch.labels_` shown above.
clusters_ch = kmeans_ch.predict(X)
clusters_ch
Out[44]:
array([1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 2, 2, 2, 0, 0,
       0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 2, 0, 0, 2, 0, 2, 1, 1, 1, 0,
       0, 1, 0, 0, 0, 0, 1, 1, 2, 2, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1,
       1, 0, 0, 2, 2, 2, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0,
       1, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 1, 0, 1, 1, 2, 1, 1, 0, 1, 1, 1,
       2, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 2, 0, 2, 2, 2, 1, 0, 1,
       0, 0, 0, 2, 2, 1, 0, 1, 0, 2, 2, 2, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1,
       1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 2, 0, 0, 2, 0, 2, 1, 1, 0, 0, 2,
       1, 2, 1, 2, 2, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0,
       1, 0, 0, 0, 1, 2, 2, 2, 1, 1, 0, 0, 1, 1, 1, 2, 0, 0, 0, 0, 0, 2,
       0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0,
       0, 0, 0, 0, 1, 0, 1, 1, 1, 2, 0, 0, 0, 0, 0, 1, 0, 2, 0, 0, 0, 1,
       2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 0, 0, 0, 1, 1, 2, 2, 0, 1, 1, 0, 0,
       0, 0, 1, 1, 1, 1, 1, 2, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 2, 2, 0,
       1, 1, 0, 1, 1, 1, 0, 1, 2, 0, 0, 0, 0, 0, 1, 2, 2, 0, 2, 0, 1, 0,
       1, 0, 1, 0, 0, 1, 2, 2, 2, 2, 1, 2, 2, 2, 0, 0, 0, 2, 0, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 2, 0, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1])
In [45]:
# Attach the cluster id and the original target to the feature frame.
# NOTE(review): this mutates X in place — every later cell sees the widened
# frame (14 columns instead of 12); re-running the clustering cells after
# this point would include these extra columns.
X.loc[:,'Cluster'] = clusters_ch
X.loc[:,'chosen'] = list(y)
In [46]:
# Show the widened frame (12 chromagram features + Cluster + chosen).
X
Out[46]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12 Cluster chosen
0 1.709998 0.081666 2.128489 0.497958 2.506539 1.564462 -0.544626 -0.212915 -0.703695 0.450360 -0.955214 0.070111 1 0
1 1.306748 0.425280 1.373850 -0.510593 -0.188549 0.109582 0.202407 1.738301 -0.130093 0.581343 -0.188241 0.261952 1 0
2 1.734670 -1.372888 -1.154236 0.358473 -1.159185 -1.272362 -1.103368 1.630111 -1.333754 -1.332400 -0.823796 -1.678793 0 0
3 1.734670 -1.064259 -0.709050 0.536782 -0.829959 -1.054348 -0.888469 1.121852 -0.885262 -0.799099 -1.043935 -1.223713 0 0
4 1.734670 1.475232 0.304731 1.153720 1.154918 -0.480466 0.141495 0.952700 0.366988 -0.320424 -0.249684 -0.286209 1 0
5 -1.357454 -1.560365 -0.993389 2.575326 -0.840510 -0.885550 1.098423 0.910833 -1.901272 -1.828920 -1.282786 -1.273477 0 0
6 0.321543 -0.501364 -0.339054 0.091827 -0.326838 0.016241 -0.078257 0.113313 1.271259 -0.267013 0.231504 1.610523 1 0
7 1.097999 -0.427045 -0.234103 1.592537 -0.309903 2.367725 -0.109067 1.728224 0.757555 1.272787 1.334116 1.609735 1 0
8 1.734670 0.468400 -0.344386 -0.090068 -0.397143 1.133967 -0.709333 -0.262112 -0.367753 0.321938 0.506700 2.032213 1 0
9 -1.359746 -1.205127 -0.671317 -0.932419 -0.674974 -1.006055 -0.652429 0.407833 -0.706174 0.460005 1.361925 -0.543693 0 0
10 1.587306 0.209036 1.657840 0.476532 1.214490 1.589653 0.700434 0.894283 0.451061 0.995991 1.361926 1.007772 1 0
11 1.734670 0.104209 -0.231131 -0.245206 -0.310258 -0.256497 -0.023748 0.747830 -0.408037 -0.453141 -0.079052 1.036713 1 0
12 1.035685 0.546933 0.622772 -0.701164 -0.677402 -1.056329 -0.597825 -1.035795 1.271259 0.922609 0.604807 1.083653 1 0
13 0.972281 0.789016 0.702993 0.179892 -0.398625 -0.659406 -0.324016 0.383199 1.271259 0.519070 -0.328565 1.268306 1 0
14 -1.133199 0.756391 -0.892669 -1.306858 -1.039211 -0.140379 -1.019984 -0.954278 -0.606554 -0.698781 1.361926 -0.845614 0 0
15 -0.286686 -0.893181 -0.864608 -1.025582 -1.145054 -1.306954 -0.907368 1.339780 0.096326 0.053290 1.361926 -0.418235 0 0
16 -1.078617 -1.219578 -0.935650 -0.892424 -1.324305 -0.677583 -1.147006 -0.197405 -1.202819 -1.032993 1.361926 -0.456323 0 0
17 -0.779483 -0.945540 -0.338284 -0.640770 2.494550 1.423590 2.049647 -1.134775 -0.280816 -0.841355 -0.559543 2.119588 2 0
18 -0.539003 0.776852 0.530745 -0.519374 2.508300 1.650614 3.324739 -0.880955 0.666814 -0.537766 -0.411351 1.953562 2 0
19 -0.654289 -0.841544 0.162215 -0.248271 1.621296 1.601864 3.324739 -1.014659 -0.291794 -0.730857 -0.463545 2.105096 2 0
20 0.213529 1.051570 2.128489 2.214653 -1.312378 2.005231 -1.146344 -1.733450 -1.671687 -1.708415 1.033382 -1.042990 0 0
21 -1.008970 0.227216 -0.685589 -0.789964 -1.174654 -0.198868 -1.080110 -1.082456 0.645207 -1.226921 1.361926 -0.957975 0 0
22 -0.509770 -0.100539 -0.507952 -0.571581 -1.187009 0.856048 -0.968467 -1.269919 1.271259 -0.525888 1.209173 -0.582980 0 0
23 -0.434214 -1.542384 0.834388 -1.429739 -1.306939 1.072244 -1.147365 -1.310384 -2.099689 0.794263 1.361926 -1.700448 0 0
24 1.024011 -0.133360 2.128488 -0.497823 -0.578522 0.203487 0.200158 1.499097 0.406494 1.107965 -0.366218 0.290605 1 0
25 0.996972 -0.068300 0.463479 -0.462229 -0.557242 -0.488353 0.319927 -0.745740 0.672891 1.272786 -0.392191 0.156034 1 0
26 1.734670 -0.636382 0.769493 -0.963728 -0.858496 0.118250 -0.164546 -0.383110 -0.373161 0.540783 0.484812 -0.694416 0 0
27 0.939519 -0.546060 1.545261 -0.614695 -0.350160 0.761016 -0.827209 -0.140902 -1.236174 1.272787 0.685149 -0.743315 1 0
28 0.065764 -1.072686 1.002347 0.205191 0.149629 0.960761 -0.441845 1.071626 -0.641837 1.004214 1.361926 -0.922358 1 0
29 1.734670 -1.368860 0.533780 -0.644543 -1.328265 0.179244 -1.144397 -1.100556 -2.100911 -0.305756 -0.595463 -1.054706 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
342 -0.855930 0.826496 -0.453379 -0.949219 0.299850 -1.090327 0.918687 -1.108240 -0.838253 -0.500788 -0.690370 2.119588 2 1
343 -1.395065 -0.085953 1.922929 -0.223554 1.095776 -0.911609 -0.062340 1.738301 -1.726601 -0.869379 -1.690911 0.185932 2 1
344 -0.298021 -1.181741 -0.986984 -1.060049 -0.912388 -1.297902 -1.142678 -1.388649 -0.921552 1.272787 -1.272055 -1.334409 0 1
345 -1.545817 -1.409239 -1.442796 -1.421649 -0.183976 -1.246279 -1.097348 -1.266514 -1.348236 1.272787 -1.481572 -1.042455 0 1
346 -0.346415 -1.039454 -1.018195 -0.914582 -0.378597 -1.221697 -1.136525 -0.552667 -0.552626 1.272787 -1.056006 -1.074637 0 1
347 -1.474386 0.029941 -0.538026 -0.402193 2.847023 1.102542 2.185053 -1.039952 -2.054190 -1.784163 -1.788691 -0.164281 2 1
348 0.118230 0.491129 -1.420467 1.101952 -1.224983 2.813714 1.536334 -1.700918 0.266362 -2.009383 0.865783 -1.676405 0 1
349 0.348396 0.059205 -0.665990 2.575326 -0.216770 1.953864 0.561548 -1.182548 0.195642 -1.279866 0.455845 -0.516320 1 1
350 1.734670 -0.003146 1.568996 -0.434837 0.521805 0.570604 0.538883 1.192315 -0.104273 0.910080 -0.509533 0.299932 1 1
351 -0.099944 -0.751191 2.128489 -1.036671 0.343684 1.099881 -0.554111 0.079251 -1.057160 1.199793 -1.225953 -0.635720 1 1
352 1.084858 -0.145339 0.813709 -0.936272 0.827416 1.989130 -0.287384 -0.189383 -0.826764 1.272787 -0.984655 -0.043832 1 1
353 1.187787 -0.429263 -0.071507 0.608020 0.731972 1.256808 -0.000437 0.609195 1.271259 0.812543 0.861217 1.341364 1 1
354 0.816380 -0.786929 -0.447031 -0.170697 0.510269 0.818066 1.142880 0.163693 0.937487 1.093106 1.361926 1.303202 1 1
355 1.118179 -0.721368 -0.381566 0.060659 0.909982 1.044335 1.603314 0.291249 0.641018 1.101106 1.361926 1.616319 1 1
356 1.226095 0.286296 -0.087024 -0.242071 -0.449252 -0.446803 -0.671768 -0.078598 1.271259 0.835145 0.775448 0.595847 1 1
357 1.613198 0.199015 0.291700 -0.240822 0.410003 0.405486 -0.473827 0.672002 0.944358 1.272787 0.606300 0.775759 1 1
358 0.890203 -0.190496 0.624245 -0.555802 0.066592 -0.255891 -0.711912 0.103160 0.368297 1.272787 -0.020119 0.448029 1 1
359 -1.618544 2.294335 -0.896404 1.636479 0.891626 -1.405106 -0.282123 -1.727463 0.411358 -1.394589 -1.825966 0.156317 2 1
360 -1.152545 1.039906 -0.408095 -0.456317 -0.531367 -1.073809 -0.666121 -1.066545 1.271259 -0.911866 -1.381364 -1.175728 0 1
361 -0.667401 2.294335 1.170804 0.615303 1.057413 -0.853776 1.839336 -1.553629 0.257124 -0.175382 -1.194146 -0.159446 2 1
362 -1.723669 0.092346 -0.153795 -1.289641 -0.293218 -1.396202 3.324739 -0.906893 -1.978969 -0.337704 -1.827015 0.302210 2 1
363 -1.723669 -0.253829 1.824256 -1.430713 0.052559 -1.405490 2.236827 0.207454 -2.097736 1.272787 -1.501135 1.113279 2 1
364 -1.723669 -0.295522 0.733720 -1.430713 -0.944463 -1.405490 2.817768 -1.650910 -2.100911 1.272786 -1.827015 -0.821891 2 1
365 -0.736785 -0.662679 2.128489 -0.285797 0.766661 0.905055 0.092249 0.094889 -1.072136 0.646308 -0.309608 -0.624335 1 1
366 0.261615 -0.713681 2.128489 -0.645248 0.263270 1.334024 -0.452231 1.223459 -1.251130 0.300478 0.115550 -0.729146 1 1
367 -0.720122 -0.373744 1.948953 0.317220 1.674285 1.748469 0.387958 0.782162 -0.402149 1.272787 0.811932 -0.459207 1 1
368 -0.090607 0.425445 1.059996 0.031889 0.100842 0.641548 0.277516 1.419713 1.271259 0.704201 0.112480 0.417123 1 1
369 1.263139 0.315568 2.128489 0.016454 1.564635 1.785525 0.652075 1.086427 0.193496 0.271812 -0.412142 0.549110 1 1
370 -1.393336 0.445276 -0.245554 2.057913 1.071731 -0.067700 0.357666 1.738300 1.036275 0.316255 0.704801 -0.474007 1 1
371 0.356389 -0.292506 -0.400481 -0.103549 -0.758012 1.042100 -0.936500 1.738300 0.568841 0.239851 -0.233628 0.122538 1 1

372 rows × 14 columns

In [47]:
# Count songs per (chosen, cluster) cell, then draw a stacked bar chart of
# how chosen vs. not-chosen songs distribute over the clusters.
counts = X.groupby(['chosen','Cluster']).size()
stacked = counts.reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[47]:
<matplotlib.axes._subplots.AxesSubplot at 0x1ef5e801358>
In [48]:
from IPython.display import display, Markdown, Latex

# Render the next company's name as a level-2 markdown section heading.
heading = Markdown('## '+companies[1])
display(heading)

Club De Banqueros y Empresarios

ANN

In [421]:
# Feature matrix for company index 1 — presumably the standardized
# chromagram frame (the `_std` suffix suggests z-scoring; verify upstream).
X = df_n_ps_std_ch[1]
In [422]:
# Binary target: whether the song was chosen for the playlist.
y = df_n_ps[1]['chosen']
In [423]:
# Hold out a test split (default 25%). Seeded with random_state=0 — the same
# seed the notebook uses for KMeans — so the split (and every downstream
# metric) is reproducible on Restart & Run All; the original call was unseeded.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
In [424]:
# Sanity-check the training split: (rows, features).
X_train.shape
Out[424]:
(191, 12)
In [53]:
# Base estimator for the grid search; hidden_layer_sizes here is just a
# placeholder — the grid below searches over it.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [54]:
# Candidate values for the MLP hyperparameter search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
# Topologies: one, two, and three hidden layers of 10-30 units.
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
# Initial learning rates: 0.001-0.009 in 0.001 steps, plus 0.01 and 0.02.
learning_rate_init_vec = [round(0.001 * k, 3) for k in range(1, 10)] + [0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [55]:
import time
start = time.time() # Current time in seconds since Jan 1, 1970 (Unix epoch); used later to report total tuning time

np.random.seed(1234)
# Search space for the MLP; batch_size is intentionally left at its default.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Score every candidate with both Cohen's kappa and accuracy; refit the
# winner on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` argument was deprecated in scikit-learn 0.22 and
# removed in 0.24 — this cell only runs on older scikit-learn versions.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [60]:
grid.fit(X_train, y_train)

# Report the winning configuration with its CV accuracy and kappa.
# (The printed strings are Spanish runtime output and are left as-is.)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time right after the search finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (10,), 'learning_rate_init': 0.004, 'max_iter': 500}, que permiten obtener un Accuracy de 78.53% y un Kappa del 40.11
Tiempo total: 24.99 minutos
In [425]:
# Topology for the Keras re-implementation of the best MLP found by the
# grid search: n0 inputs -> 10 tanh units -> 1 sigmoid output.
n0 = X_train.shape[1]  # input dimension

# Hidden-layer configuration (grid search selected a single 10-unit layer).
grid.best_params_['hidden_layer_sizes'] = [10]

### hidden_layer_sizes
# Layer widths: the hidden sizes followed by the single output unit.
# (Replaces the original index-by-index copy loop with list().)
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)

lr = 0.004    # learning_rate_init selected by the grid search
epochs = 500  # max_iter selected by the grid search
In [426]:
# Functional-API input layer sized to the feature count (n0 = 12 here).
input_tensor = Input(shape = (n0,))
In [427]:
# Stack a tanh hidden layer for each width in ns[:-1], each feeding on the
# previous layer's output, then add the sigmoid output unit (binary target).
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))

classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
In [428]:
# Build the model and snapshot its freshly-initialized weights so the
# training cell can reset to the same starting point when re-run.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [429]:
# Architecture overview: 12 -> 10 (tanh) -> 1 (sigmoid), 141 parameters.
model.summary()
Model: "model_22"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_22 (InputLayer)        (None, 12)                0         
_________________________________________________________________
dense_70 (Dense)             (None, 10)                130       
_________________________________________________________________
dense_71 (Dense)             (None, 1)                 11        
=================================================================
Total params: 141
Trainable params: 141
Non-trainable params: 0
_________________________________________________________________
In [430]:
# Restore the initial random weights so re-running this cell trains from
# the same starting point instead of continuing a previous fit.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy Keras argument name; newer releases use
# `learning_rate`.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever val_accuracy fails to improve by at
# least 0.01 for 10 consecutive epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 191 samples, validate on 64 samples
Epoch 1/500
191/191 [==============================] - 0s 1ms/step - loss: 0.8012 - accuracy: 0.4607 - val_loss: 0.7811 - val_accuracy: 0.4688
Epoch 2/500
191/191 [==============================] - 0s 63us/step - loss: 0.7341 - accuracy: 0.4921 - val_loss: 0.7282 - val_accuracy: 0.5156
Epoch 3/500
191/191 [==============================] - 0s 68us/step - loss: 0.6797 - accuracy: 0.5602 - val_loss: 0.6882 - val_accuracy: 0.5469
Epoch 4/500
191/191 [==============================] - 0s 99us/step - loss: 0.6390 - accuracy: 0.6283 - val_loss: 0.6650 - val_accuracy: 0.5469
Epoch 5/500
191/191 [==============================] - 0s 73us/step - loss: 0.6126 - accuracy: 0.6754 - val_loss: 0.6514 - val_accuracy: 0.6250
Epoch 6/500
191/191 [==============================] - 0s 63us/step - loss: 0.5926 - accuracy: 0.6963 - val_loss: 0.6429 - val_accuracy: 0.6250
Epoch 7/500
191/191 [==============================] - 0s 63us/step - loss: 0.5764 - accuracy: 0.7120 - val_loss: 0.6381 - val_accuracy: 0.6406
Epoch 8/500
191/191 [==============================] - 0s 63us/step - loss: 0.5633 - accuracy: 0.7277 - val_loss: 0.6353 - val_accuracy: 0.6406
Epoch 9/500
191/191 [==============================] - 0s 58us/step - loss: 0.5510 - accuracy: 0.7435 - val_loss: 0.6376 - val_accuracy: 0.6562
Epoch 10/500
191/191 [==============================] - 0s 58us/step - loss: 0.5416 - accuracy: 0.7435 - val_loss: 0.6412 - val_accuracy: 0.6406
Epoch 11/500
191/191 [==============================] - 0s 73us/step - loss: 0.5322 - accuracy: 0.7382 - val_loss: 0.6441 - val_accuracy: 0.6250
Epoch 12/500
191/191 [==============================] - 0s 63us/step - loss: 0.5248 - accuracy: 0.7382 - val_loss: 0.6483 - val_accuracy: 0.6562
Epoch 13/500
191/191 [==============================] - 0s 68us/step - loss: 0.5179 - accuracy: 0.7330 - val_loss: 0.6519 - val_accuracy: 0.6562
Epoch 14/500
191/191 [==============================] - 0s 68us/step - loss: 0.5124 - accuracy: 0.7330 - val_loss: 0.6549 - val_accuracy: 0.6562
Epoch 15/500
191/191 [==============================] - 0s 73us/step - loss: 0.5067 - accuracy: 0.7382 - val_loss: 0.6590 - val_accuracy: 0.6562
Epoch 16/500
191/191 [==============================] - 0s 73us/step - loss: 0.5023 - accuracy: 0.7382 - val_loss: 0.6634 - val_accuracy: 0.6719
Epoch 17/500
191/191 [==============================] - 0s 68us/step - loss: 0.4988 - accuracy: 0.7382 - val_loss: 0.6681 - val_accuracy: 0.6719
Epoch 18/500
191/191 [==============================] - 0s 68us/step - loss: 0.4943 - accuracy: 0.7435 - val_loss: 0.6703 - val_accuracy: 0.6719
Epoch 19/500
191/191 [==============================] - 0s 68us/step - loss: 0.4903 - accuracy: 0.7539 - val_loss: 0.6718 - val_accuracy: 0.6719
Epoch 20/500
191/191 [==============================] - 0s 68us/step - loss: 0.4874 - accuracy: 0.7539 - val_loss: 0.6743 - val_accuracy: 0.6719
Epoch 21/500
191/191 [==============================] - 0s 68us/step - loss: 0.4848 - accuracy: 0.7539 - val_loss: 0.6766 - val_accuracy: 0.6719
Epoch 22/500
191/191 [==============================] - 0s 63us/step - loss: 0.4821 - accuracy: 0.7539 - val_loss: 0.6816 - val_accuracy: 0.6719
Epoch 23/500
191/191 [==============================] - 0s 58us/step - loss: 0.4789 - accuracy: 0.7539 - val_loss: 0.6825 - val_accuracy: 0.6719
Epoch 24/500
191/191 [==============================] - 0s 68us/step - loss: 0.4769 - accuracy: 0.7539 - val_loss: 0.6855 - val_accuracy: 0.6562
Epoch 25/500
191/191 [==============================] - 0s 63us/step - loss: 0.4745 - accuracy: 0.7539 - val_loss: 0.6863 - val_accuracy: 0.6562
Epoch 26/500
191/191 [==============================] - 0s 89us/step - loss: 0.4714 - accuracy: 0.7592 - val_loss: 0.6885 - val_accuracy: 0.6562

Epoch 00026: ReduceLROnPlateau reducing learning rate to 0.0020000000949949026.
Epoch 27/500
191/191 [==============================] - 0s 89us/step - loss: 0.4696 - accuracy: 0.7592 - val_loss: 0.6896 - val_accuracy: 0.6562
Epoch 28/500
191/191 [==============================] - 0s 78us/step - loss: 0.4682 - accuracy: 0.7696 - val_loss: 0.6888 - val_accuracy: 0.6562
Epoch 29/500
191/191 [==============================] - 0s 73us/step - loss: 0.4671 - accuracy: 0.7644 - val_loss: 0.6874 - val_accuracy: 0.6562
Epoch 30/500
191/191 [==============================] - 0s 68us/step - loss: 0.4663 - accuracy: 0.7749 - val_loss: 0.6893 - val_accuracy: 0.6562
Epoch 31/500
191/191 [==============================] - 0s 68us/step - loss: 0.4650 - accuracy: 0.7749 - val_loss: 0.6893 - val_accuracy: 0.6562
Epoch 32/500
191/191 [==============================] - 0s 68us/step - loss: 0.4640 - accuracy: 0.7801 - val_loss: 0.6907 - val_accuracy: 0.6562
Epoch 33/500
191/191 [==============================] - 0s 63us/step - loss: 0.4626 - accuracy: 0.7906 - val_loss: 0.6901 - val_accuracy: 0.6562
Epoch 34/500
191/191 [==============================] - 0s 63us/step - loss: 0.4617 - accuracy: 0.7853 - val_loss: 0.6901 - val_accuracy: 0.6562
Epoch 35/500
191/191 [==============================] - 0s 63us/step - loss: 0.4605 - accuracy: 0.7958 - val_loss: 0.6905 - val_accuracy: 0.6562
Epoch 36/500
191/191 [==============================] - 0s 63us/step - loss: 0.4599 - accuracy: 0.7906 - val_loss: 0.6900 - val_accuracy: 0.6562

Epoch 00036: ReduceLROnPlateau reducing learning rate to 0.0010000000474974513.
Epoch 37/500
191/191 [==============================] - 0s 63us/step - loss: 0.4582 - accuracy: 0.7906 - val_loss: 0.6902 - val_accuracy: 0.6562
Epoch 38/500
191/191 [==============================] - 0s 63us/step - loss: 0.4579 - accuracy: 0.7906 - val_loss: 0.6908 - val_accuracy: 0.6562
Epoch 39/500
191/191 [==============================] - 0s 63us/step - loss: 0.4572 - accuracy: 0.7906 - val_loss: 0.6913 - val_accuracy: 0.6562
Epoch 40/500
191/191 [==============================] - 0s 73us/step - loss: 0.4568 - accuracy: 0.7958 - val_loss: 0.6919 - val_accuracy: 0.6562
Epoch 41/500
191/191 [==============================] - 0s 68us/step - loss: 0.4564 - accuracy: 0.7958 - val_loss: 0.6923 - val_accuracy: 0.6562
Epoch 42/500
191/191 [==============================] - 0s 68us/step - loss: 0.4556 - accuracy: 0.7958 - val_loss: 0.6925 - val_accuracy: 0.6562
Epoch 43/500
191/191 [==============================] - 0s 73us/step - loss: 0.4551 - accuracy: 0.7958 - val_loss: 0.6927 - val_accuracy: 0.6562
Epoch 44/500
191/191 [==============================] - 0s 73us/step - loss: 0.4546 - accuracy: 0.7958 - val_loss: 0.6930 - val_accuracy: 0.6562
Epoch 45/500
191/191 [==============================] - 0s 73us/step - loss: 0.4540 - accuracy: 0.7958 - val_loss: 0.6926 - val_accuracy: 0.6562
Epoch 46/500
191/191 [==============================] - 0s 63us/step - loss: 0.4535 - accuracy: 0.7958 - val_loss: 0.6925 - val_accuracy: 0.6562

Epoch 00046: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 47/500
191/191 [==============================] - 0s 68us/step - loss: 0.4528 - accuracy: 0.7958 - val_loss: 0.6927 - val_accuracy: 0.6562
Epoch 48/500
191/191 [==============================] - 0s 73us/step - loss: 0.4526 - accuracy: 0.7958 - val_loss: 0.6928 - val_accuracy: 0.6562
Epoch 49/500
191/191 [==============================] - 0s 73us/step - loss: 0.4523 - accuracy: 0.7958 - val_loss: 0.6930 - val_accuracy: 0.6562
Epoch 50/500
191/191 [==============================] - 0s 68us/step - loss: 0.4520 - accuracy: 0.7958 - val_loss: 0.6930 - val_accuracy: 0.6562
Epoch 51/500
191/191 [==============================] - 0s 68us/step - loss: 0.4518 - accuracy: 0.7958 - val_loss: 0.6933 - val_accuracy: 0.6562
Epoch 52/500
191/191 [==============================] - 0s 68us/step - loss: 0.4515 - accuracy: 0.7958 - val_loss: 0.6932 - val_accuracy: 0.6562
Epoch 53/500
191/191 [==============================] - 0s 52us/step - loss: 0.4513 - accuracy: 0.7958 - val_loss: 0.6932 - val_accuracy: 0.6562
Epoch 54/500
191/191 [==============================] - 0s 58us/step - loss: 0.4510 - accuracy: 0.7958 - val_loss: 0.6932 - val_accuracy: 0.6562
Epoch 55/500
191/191 [==============================] - 0s 105us/step - loss: 0.4507 - accuracy: 0.7958 - val_loss: 0.6931 - val_accuracy: 0.6562
Epoch 56/500
191/191 [==============================] - 0s 84us/step - loss: 0.4504 - accuracy: 0.7958 - val_loss: 0.6932 - val_accuracy: 0.6562

Epoch 00056: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 57/500
191/191 [==============================] - 0s 84us/step - loss: 0.4501 - accuracy: 0.7958 - val_loss: 0.6933 - val_accuracy: 0.6562
Epoch 58/500
191/191 [==============================] - 0s 78us/step - loss: 0.4500 - accuracy: 0.7958 - val_loss: 0.6934 - val_accuracy: 0.6562
Epoch 59/500
191/191 [==============================] - 0s 52us/step - loss: 0.4498 - accuracy: 0.7958 - val_loss: 0.6934 - val_accuracy: 0.6562
Epoch 60/500
191/191 [==============================] - 0s 63us/step - loss: 0.4497 - accuracy: 0.7958 - val_loss: 0.6933 - val_accuracy: 0.6562
Epoch 61/500
191/191 [==============================] - 0s 58us/step - loss: 0.4496 - accuracy: 0.7958 - val_loss: 0.6933 - val_accuracy: 0.6562
Epoch 62/500
191/191 [==============================] - 0s 58us/step - loss: 0.4494 - accuracy: 0.7958 - val_loss: 0.6935 - val_accuracy: 0.6562
Epoch 63/500
191/191 [==============================] - 0s 58us/step - loss: 0.4493 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 64/500
191/191 [==============================] - 0s 63us/step - loss: 0.4492 - accuracy: 0.7958 - val_loss: 0.6934 - val_accuracy: 0.6562
Epoch 65/500
191/191 [==============================] - 0s 58us/step - loss: 0.4490 - accuracy: 0.7958 - val_loss: 0.6935 - val_accuracy: 0.6562
Epoch 66/500
191/191 [==============================] - 0s 58us/step - loss: 0.4489 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562

Epoch 00066: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 67/500
191/191 [==============================] - 0s 58us/step - loss: 0.4487 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 68/500
191/191 [==============================] - 0s 73us/step - loss: 0.4487 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 69/500
191/191 [==============================] - 0s 63us/step - loss: 0.4486 - accuracy: 0.7958 - val_loss: 0.6935 - val_accuracy: 0.6562
Epoch 70/500
191/191 [==============================] - 0s 52us/step - loss: 0.4486 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 71/500
191/191 [==============================] - 0s 52us/step - loss: 0.4484 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 72/500
191/191 [==============================] - 0s 52us/step - loss: 0.4484 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 73/500
191/191 [==============================] - 0s 58us/step - loss: 0.4483 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 74/500
191/191 [==============================] - 0s 52us/step - loss: 0.4482 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 75/500
191/191 [==============================] - 0s 58us/step - loss: 0.4482 - accuracy: 0.7958 - val_loss: 0.6936 - val_accuracy: 0.6562
Epoch 76/500
191/191 [==============================] - 0s 58us/step - loss: 0.4481 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562

Epoch 00076: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 77/500
191/191 [==============================] - 0s 58us/step - loss: 0.4480 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 78/500
191/191 [==============================] - 0s 47us/step - loss: 0.4480 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 79/500
191/191 [==============================] - 0s 58us/step - loss: 0.4480 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 80/500
191/191 [==============================] - 0s 110us/step - loss: 0.4479 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 81/500
191/191 [==============================] - 0s 63us/step - loss: 0.4479 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 82/500
191/191 [==============================] - 0s 68us/step - loss: 0.4479 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 83/500
191/191 [==============================] - 0s 58us/step - loss: 0.4478 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 84/500
191/191 [==============================] - 0s 58us/step - loss: 0.4478 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 85/500
191/191 [==============================] - 0s 52us/step - loss: 0.4477 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 86/500
191/191 [==============================] - 0s 47us/step - loss: 0.4477 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00086: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 87/500
191/191 [==============================] - 0s 52us/step - loss: 0.4477 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 88/500
191/191 [==============================] - 0s 58us/step - loss: 0.4477 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 89/500
191/191 [==============================] - 0s 52us/step - loss: 0.4476 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 90/500
191/191 [==============================] - 0s 58us/step - loss: 0.4476 - accuracy: 0.7958 - val_loss: 0.6937 - val_accuracy: 0.6562
Epoch 91/500
191/191 [==============================] - 0s 68us/step - loss: 0.4476 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 92/500
191/191 [==============================] - 0s 52us/step - loss: 0.4476 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 93/500
191/191 [==============================] - 0s 52us/step - loss: 0.4476 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 94/500
191/191 [==============================] - 0s 63us/step - loss: 0.4476 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 95/500
191/191 [==============================] - 0s 58us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 96/500
191/191 [==============================] - 0s 58us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00096: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05.
Epoch 97/500
191/191 [==============================] - 0s 58us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 98/500
191/191 [==============================] - 0s 68us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 99/500
191/191 [==============================] - 0s 68us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 100/500
191/191 [==============================] - 0s 63us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 101/500
191/191 [==============================] - 0s 52us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 102/500
191/191 [==============================] - 0s 58us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 103/500
191/191 [==============================] - 0s 52us/step - loss: 0.4475 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 104/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 105/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 106/500
191/191 [==============================] - 0s 120us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00106: ReduceLROnPlateau reducing learning rate to 7.812500371073838e-06.
Epoch 107/500
191/191 [==============================] - 0s 63us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 108/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 109/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 110/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 111/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 112/500
191/191 [==============================] - 0s 63us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 113/500
191/191 [==============================] - 0s 63us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 114/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 115/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 116/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00116: ReduceLROnPlateau reducing learning rate to 3.906250185536919e-06.
Epoch 117/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 118/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 119/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 120/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 121/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 122/500
191/191 [==============================] - 0s 58us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 123/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 124/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 125/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 126/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00126: ReduceLROnPlateau reducing learning rate to 1.9531250927684596e-06.
Epoch 127/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 128/500
191/191 [==============================] - 0s 52us/step - loss: 0.4474 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 129/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 130/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 131/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 132/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 133/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 134/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 135/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 136/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00136: ReduceLROnPlateau reducing learning rate to 9.765625463842298e-07.
Epoch 137/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 138/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 139/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 140/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 141/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 142/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 143/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 144/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 145/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 146/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00146: ReduceLROnPlateau reducing learning rate to 4.882812731921149e-07.
Epoch 147/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 148/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 149/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 150/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 151/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 152/500
191/191 [==============================] - ETA: 0s - loss: 0.5667 - accuracy: 0.68 - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 153/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 154/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 155/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 156/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00156: ReduceLROnPlateau reducing learning rate to 2.4414063659605745e-07.
Epoch 157/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 158/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 159/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 160/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 161/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 162/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 163/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 164/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 165/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 166/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00166: ReduceLROnPlateau reducing learning rate to 1.2207031829802872e-07.
Epoch 167/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 168/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 169/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 170/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 171/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 172/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 173/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 174/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 175/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 176/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00176: ReduceLROnPlateau reducing learning rate to 6.103515914901436e-08.
Epoch 177/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 178/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 179/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 180/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 181/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 182/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 183/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 184/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 185/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 186/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00186: ReduceLROnPlateau reducing learning rate to 3.051757957450718e-08.
Epoch 187/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 188/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 189/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 190/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 191/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 192/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 193/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 194/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 195/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 196/500
191/191 [==============================] - 0s 99us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00196: ReduceLROnPlateau reducing learning rate to 1.525878978725359e-08.
Epoch 197/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 198/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 199/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 200/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 201/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 202/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 203/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 204/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 205/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 206/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00206: ReduceLROnPlateau reducing learning rate to 7.629394893626795e-09.
Epoch 207/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 208/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 209/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 210/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 211/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 212/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 213/500
191/191 [==============================] - 0s 89us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 214/500
191/191 [==============================] - 0s 94us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 215/500
191/191 [==============================] - 0s 94us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 216/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00216: ReduceLROnPlateau reducing learning rate to 3.814697446813398e-09.
Epoch 217/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 218/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 219/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 220/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 221/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 222/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 223/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 224/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 225/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 226/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00226: ReduceLROnPlateau reducing learning rate to 1.907348723406699e-09.
Epoch 227/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 228/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 229/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 230/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 231/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 232/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 233/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 234/500
191/191 [==============================] - 0s 99us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 235/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 236/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00236: ReduceLROnPlateau reducing learning rate to 9.536743617033494e-10.
Epoch 237/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 238/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 239/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 240/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 241/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 242/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 243/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 244/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 245/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 246/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00246: ReduceLROnPlateau reducing learning rate to 4.768371808516747e-10.
Epoch 247/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 248/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 249/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 250/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 251/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 252/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 253/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 254/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 255/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 256/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00256: ReduceLROnPlateau reducing learning rate to 2.3841859042583735e-10.
Epoch 257/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 258/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 259/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 260/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 261/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 262/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 263/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 264/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 265/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 266/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00266: ReduceLROnPlateau reducing learning rate to 1.1920929521291868e-10.
Epoch 267/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 268/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 269/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 270/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 271/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 272/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 273/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 274/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 275/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 276/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00276: ReduceLROnPlateau reducing learning rate to 5.960464760645934e-11.
Epoch 277/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 278/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 279/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 280/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 281/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 282/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 283/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 284/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 285/500
191/191 [==============================] - 0s 94us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 286/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00286: ReduceLROnPlateau reducing learning rate to 2.980232380322967e-11.
Epoch 287/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 288/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 289/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 290/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 291/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 292/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 293/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 294/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 295/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 296/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00296: ReduceLROnPlateau reducing learning rate to 1.4901161901614834e-11.
Epoch 297/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 298/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 299/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 300/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 301/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 302/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 303/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 304/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 305/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 306/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00306: ReduceLROnPlateau reducing learning rate to 7.450580950807417e-12.
Epoch 307/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 308/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 309/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 310/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 311/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 312/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 313/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 314/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 315/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 316/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00316: ReduceLROnPlateau reducing learning rate to 3.725290475403709e-12.
Epoch 317/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 318/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 319/500
191/191 [==============================] - 0s 105us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 320/500
191/191 [==============================] - 0s 89us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 321/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 322/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 323/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 324/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 325/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 326/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00326: ReduceLROnPlateau reducing learning rate to 1.8626452377018543e-12.
Epoch 327/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 328/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 329/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 330/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 331/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 332/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 333/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 334/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 335/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 336/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00336: ReduceLROnPlateau reducing learning rate to 9.313226188509272e-13.
Epoch 337/500
191/191 [==============================] - 0s 42us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 338/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 339/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 340/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 341/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 342/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 343/500
191/191 [==============================] - 0s 42us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 344/500
191/191 [==============================] - 0s 42us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 345/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 346/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00346: ReduceLROnPlateau reducing learning rate to 4.656613094254636e-13.
Epoch 347/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 348/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 349/500
191/191 [==============================] - 0s 42us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 350/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 351/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 352/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 353/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 354/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 355/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 356/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00356: ReduceLROnPlateau reducing learning rate to 2.328306547127318e-13.
Epoch 357/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 358/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 359/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 360/500
191/191 [==============================] - 0s 105us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 361/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 362/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 363/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 364/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 365/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 366/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00366: ReduceLROnPlateau reducing learning rate to 1.164153273563659e-13.
Epoch 367/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 368/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 369/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 370/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 371/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 372/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 373/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 374/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 375/500
191/191 [==============================] - 0s 84us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 376/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00376: ReduceLROnPlateau reducing learning rate to 5.820766367818295e-14.
Epoch 377/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 378/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 379/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 380/500
191/191 [==============================] - 0s 110us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 381/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 382/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 383/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 384/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 385/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 386/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00386: ReduceLROnPlateau reducing learning rate to 2.9103831839091474e-14.
Epoch 387/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 388/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 389/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 390/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 391/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 392/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 393/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 394/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 395/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 396/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00396: ReduceLROnPlateau reducing learning rate to 1.4551915919545737e-14.
Epoch 397/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 398/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 399/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 400/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 401/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 402/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 403/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 404/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 405/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 406/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00406: ReduceLROnPlateau reducing learning rate to 7.275957959772868e-15.
Epoch 407/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 408/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 409/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 410/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 411/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 412/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 413/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 414/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 415/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 416/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00416: ReduceLROnPlateau reducing learning rate to 3.637978979886434e-15.
Epoch 417/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 418/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 419/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 420/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 421/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 422/500
191/191 [==============================] - 0s 99us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 423/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 424/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 425/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 426/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00426: ReduceLROnPlateau reducing learning rate to 1.818989489943217e-15.
Epoch 427/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 428/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 429/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 430/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 431/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 432/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 433/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 434/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 435/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 436/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00436: ReduceLROnPlateau reducing learning rate to 9.094947449716085e-16.
Epoch 437/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 438/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 439/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 440/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 441/500
191/191 [==============================] - 0s 115us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 442/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 443/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 444/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 445/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 446/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00446: ReduceLROnPlateau reducing learning rate to 4.547473724858043e-16.
Epoch 447/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 448/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 449/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 450/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 451/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 452/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 453/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 454/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 455/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 456/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00456: ReduceLROnPlateau reducing learning rate to 2.2737368624290214e-16.
Epoch 457/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 458/500
191/191 [==============================] - 0s 73us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 459/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 460/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 461/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 462/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 463/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 464/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 465/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 466/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00466: ReduceLROnPlateau reducing learning rate to 1.1368684312145107e-16.
Epoch 467/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 468/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 469/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 470/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 471/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 472/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 473/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 474/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 475/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 476/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00476: ReduceLROnPlateau reducing learning rate to 5.684342156072553e-17.
Epoch 477/500
191/191 [==============================] - 0s 68us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 478/500
191/191 [==============================] - 0s 78us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 479/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 480/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 481/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 482/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 483/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 484/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 485/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 486/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00486: ReduceLROnPlateau reducing learning rate to 2.842171078036277e-17.
Epoch 487/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 488/500
191/191 [==============================] - 0s 47us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 489/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 490/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 491/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 492/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 493/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 494/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 495/500
191/191 [==============================] - 0s 99us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 496/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562

Epoch 00496: ReduceLROnPlateau reducing learning rate to 1.4210855390181384e-17.
Epoch 497/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 498/500
191/191 [==============================] - 0s 63us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 499/500
191/191 [==============================] - 0s 58us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
Epoch 500/500
191/191 [==============================] - 0s 52us/step - loss: 0.4473 - accuracy: 0.7958 - val_loss: 0.6938 - val_accuracy: 0.6562
In [431]:
# Plot the Keras training history: accuracy and loss, train vs. validation.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x-axis point per training epoch.
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 500)
In [432]:
# Score the trained network on the held-out test split (loss + accuracy).
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
64/64 [==============================] - 0s 47us/step
test loss: 0.6938145160675049, test accuracy: 0.65625
In [433]:
# Predicted probabilities from the sigmoid output; ROC AUC works on scores,
# so no thresholding is needed here.
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
AUC ROC:  0.5519480519480519
In [434]:
# Threshold the predicted probabilities at 0.5 to obtain hard 0/1 labels,
# then report Cohen's kappa (chance-corrected agreement) against the truth.
# NOTE: this rebinds y_pred, discarding the raw probability scores.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.053763440860215006

KMeans

In [71]:
# Display the standardized chromagram feature matrix (255 rows x 12 columns).
X
Out[71]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12
0 -0.369691 -0.881824 -0.095656 -0.923999 -0.030645 -0.834931 -1.031650 -0.840942 -0.677716 1.084098 -1.064999 -1.156623
1 -0.175875 -0.403800 -0.657709 -0.201259 1.691433 -0.672783 -0.119944 -0.440080 0.339906 1.084098 0.504608 0.931676
2 0.894452 -0.189794 1.959063 0.169276 -0.403611 -1.036954 1.447615 -0.340767 -0.846170 -0.515065 -0.699878 0.032355
3 0.060782 -0.392075 0.826233 -0.048480 1.789786 -0.552163 0.121028 -0.111355 0.220614 1.084098 0.073241 1.176257
4 -1.116536 -0.923742 -1.238971 -0.919598 0.313068 -1.160111 1.316032 -0.700013 -1.600210 1.084098 -1.072155 1.270095
5 -1.082752 -0.067472 -1.142511 -0.923999 0.389201 -1.061180 2.067035 -0.269819 -1.620482 -0.468180 -0.962315 1.853511
6 -1.202528 -0.776453 -1.135820 -0.773268 -0.409932 -0.832224 -0.631931 -0.119492 1.620288 0.162930 -1.063358 0.001987
7 -1.206944 0.500703 -0.243295 -0.611928 0.262760 -0.855004 0.001441 -1.579225 0.245997 1.084098 -1.197067 -0.681282
8 -0.982006 0.421831 0.984997 -0.550391 0.215104 -1.100712 2.248321 -0.336428 1.282109 1.084098 -0.061910 0.480931
9 1.703175 1.029234 1.508859 0.964653 2.164165 0.482073 -0.794175 0.729102 0.721867 0.439251 0.198086 1.778147
10 1.528139 0.898498 0.923889 0.634045 2.016059 0.674138 -0.430188 0.558129 1.200855 0.618822 0.291110 1.853511
11 0.334361 -0.301383 -0.450307 -0.470199 -0.977542 0.863046 -0.396657 -0.882307 0.259614 0.809320 1.750633 0.245556
12 0.597458 0.773201 0.182265 0.104921 0.580017 0.644184 1.433111 1.735353 -0.712181 -0.818426 -0.129551 -0.039236
13 -0.493625 1.341798 -0.632970 2.666081 2.015672 1.537362 2.432631 0.500840 1.337627 1.084098 -0.069418 1.097765
14 0.962230 0.028408 0.059003 -0.233385 1.425585 1.226062 1.160066 1.604723 0.272753 1.084098 0.709073 1.356660
15 -0.843247 0.160055 1.959063 -0.354971 -0.294051 -0.485118 -0.796417 -0.212355 -0.168152 -0.782723 -0.232169 -0.234956
16 -0.256403 1.322075 1.520118 0.907583 1.032003 0.577931 0.414295 0.551597 0.783033 1.084098 0.088744 1.848078
17 -0.456352 2.142046 1.959063 1.734877 1.472675 0.985568 0.646614 0.230853 0.051480 -1.122573 -0.594948 0.587848
18 -0.637040 -1.030219 -1.165495 -0.606800 2.164165 -1.161299 -0.882343 1.727762 -1.021803 -1.420384 -1.197067 -1.157909
19 -1.204564 -1.022455 -0.683305 -0.923999 -0.935167 -0.988596 -0.688535 1.735353 -0.372860 -1.581696 -1.197067 -0.315002
20 -1.077420 -0.314272 1.036699 -0.850174 -0.071202 -1.104286 0.102157 -0.867378 -0.464391 0.591092 -1.048902 1.853510
21 -0.494728 -1.023105 -0.383945 -0.918858 0.498899 -1.042513 -0.072226 -0.187025 -0.589871 1.084098 -0.986967 0.537859
22 -0.525116 0.187277 1.220635 -0.221678 0.589822 -1.115053 0.421737 0.656325 -0.098846 1.084098 -0.740109 1.853156
23 -0.282675 0.571926 -0.333097 2.860439 1.250860 1.434107 2.006573 0.362041 1.620288 0.896295 -0.274184 1.257508
24 1.898264 1.240876 0.302806 -0.427292 -0.709276 -0.159183 -0.474972 0.273501 -0.789908 -0.085745 0.868592 -0.492577
25 0.449834 -0.315494 -0.187659 -0.426842 -0.875213 0.916315 -0.212134 -0.503325 0.251438 0.499009 1.750633 0.286629
26 -0.745212 -0.457525 -0.261214 1.653228 0.471865 -0.532478 3.048431 -0.344057 0.968569 -1.690268 0.186111 -0.576484
27 -1.146960 2.737908 0.205319 -0.915593 1.224042 -1.080890 2.432039 -1.475399 1.443033 -0.136290 -1.161316 1.353237
28 -0.006670 2.177984 -0.364684 -0.232426 2.164165 -0.501370 0.481222 -0.462802 -0.495909 0.200246 -0.925963 -0.127350
29 -1.198149 1.245381 1.824748 -0.544122 2.094805 -0.986543 1.141795 -1.276929 1.620288 0.772653 -1.183303 -0.633811
... ... ... ... ... ... ... ... ... ... ... ... ...
225 -0.850302 -0.813520 -1.238258 0.389340 -0.820553 -0.147948 -0.867381 -1.187728 1.620288 -0.228859 0.676545 -1.166259
226 1.113887 -0.637363 -0.178882 -0.867049 -0.180374 -0.059762 -1.031280 1.075930 1.620288 -0.831212 -0.536781 -0.852284
227 -0.939216 -1.028651 -1.083821 0.671288 -1.104197 0.395233 -0.935964 -0.545426 -0.734897 -1.634988 1.750633 -0.991450
228 0.323318 -0.538560 1.959063 0.469737 -0.928167 0.315432 -0.838357 0.553622 -0.985928 -0.404420 0.017683 -0.868443
229 0.651351 0.768776 1.651644 0.613579 -0.672599 0.778338 -0.288893 1.735353 -0.698435 -0.803492 0.749774 -0.176628
230 1.208263 0.590562 -0.856200 -0.037362 -0.890995 0.154056 -0.224548 0.060386 1.620288 -0.577590 1.081262 0.137052
231 1.629732 -0.424079 -0.793853 -0.297522 0.876511 -0.502033 -0.540340 1.735353 1.179917 0.331082 -0.186926 0.043014
232 1.097162 0.374225 -0.897801 -0.315126 0.016346 0.367805 -0.397202 0.034201 0.736455 1.084098 1.111692 0.505493
233 1.452404 -0.818664 1.001952 0.094142 -1.157451 -0.673136 -1.031650 -0.681166 -1.578572 -1.040029 1.750633 -1.088779
234 0.687938 0.363684 -0.077785 1.495170 -0.946513 0.162091 -0.875163 -0.486609 -0.657582 -0.728212 1.750633 0.166071
235 -0.511818 -1.019067 1.937312 -0.923999 -1.199069 -1.157422 1.424573 -1.506879 -0.941792 1.084098 -0.598433 1.497327
236 -0.649452 1.110585 0.023607 -0.619494 -0.351503 -0.377758 0.774664 -1.037327 -0.008921 1.084098 -0.300447 -0.799951
237 -0.675917 0.864345 1.959063 -0.702632 0.767520 -0.242236 -0.320118 -0.889868 -0.543499 0.953653 -0.295863 0.184530
238 -0.820946 0.480728 -0.348445 1.706293 -0.634861 -0.548325 -0.658463 -1.446347 -0.595881 -1.346009 1.750633 -0.891882
239 0.335654 -0.570366 0.440736 2.255028 -0.777152 -0.336048 -0.104033 0.504513 -0.304387 -0.899407 1.750633 0.586226
240 0.772849 -0.288034 0.998235 2.707124 0.694491 1.600236 -0.599878 0.863164 1.620288 -0.793363 1.076398 0.313476
241 -0.725775 -0.253169 -1.058923 -0.019515 -0.367824 -0.703472 0.163373 0.093846 1.620288 0.712877 -0.410881 0.776774
242 -0.883133 0.370675 -1.102573 0.232760 -1.047240 -0.968298 0.151912 -0.346068 1.620288 -0.219994 -0.255762 0.629036
243 -0.504299 -0.158035 -0.129250 0.170764 0.127833 -0.424841 0.960604 0.067663 1.620288 0.679838 0.005478 0.547596
244 -0.829496 0.743464 1.959063 0.266679 0.653670 -1.095468 1.900161 1.007911 -0.442746 0.870766 -0.246038 1.728101
245 -1.203285 0.455171 0.496797 -0.873183 0.629642 -1.113864 1.271226 0.542153 -0.636367 1.084098 -1.140705 0.222521
246 -1.122600 -0.442839 1.824660 -0.819762 1.050840 -1.048446 1.937596 0.131208 -0.474964 0.536040 -0.800922 1.853511
247 -1.206944 -0.795159 -1.159900 -0.187089 0.233445 2.452569 0.183293 1.253589 -1.394630 0.753640 -1.083641 -1.163597
248 -0.501382 -1.030219 -0.612979 -0.923999 -0.608020 -0.863086 -0.124132 -0.944272 -1.081271 1.084098 -1.170461 -1.244399
249 -0.704093 -0.985145 1.199508 -0.321552 2.164165 -0.193249 -0.295412 -0.394034 0.032818 -1.672473 -0.724683 -1.067831
250 0.831957 -0.173367 1.636565 1.345345 -0.989257 0.826135 -0.824412 -0.072225 -0.255975 -1.005500 1.750633 -1.067130
251 -0.895156 -1.022380 -0.410545 2.237273 -1.199069 0.687783 -1.006468 -0.694581 -1.311235 -1.270219 1.750633 -1.233006
252 -0.546320 -0.631883 -0.800789 -0.187107 -1.179353 0.464606 -0.905192 -0.113592 0.167123 -0.843254 1.750633 -1.148945
253 -0.591349 -0.947758 -0.915262 -0.579179 0.089961 0.047756 -0.905554 -1.413215 -1.184716 1.084098 0.384684 -1.035788
254 -0.072947 -0.912155 0.150662 -0.246506 -1.198328 -0.779047 -1.031650 0.022522 -1.604883 -1.609572 1.750632 -1.247525

255 rows × 12 columns

In [72]:
# Elbow method: record the within-cluster sum of squares (KMeans inertia)
# for k = 1..14 so a sensible cluster count can be chosen from the curve.
WSSs = [
    KMeans(n_clusters=k, random_state=0).fit(X).inertia_
    for k in range(1, 15)
]
WSSs
Out[72]:
[3060.0,
 2594.1549165385713,
 2309.549481414484,
 2089.610498278143,
 1959.0566777030967,
 1842.9588281368096,
 1736.092921360928,
 1663.5689730025233,
 1609.4951000525748,
 1557.5767570007226,
 1514.8225721032359,
 1447.3960995377222,
 1422.027087832329,
 1389.3032827223215]
In [73]:
# Elbow plot: look for the "knee" where adding clusters stops paying off.
plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs)
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares')
plt.title('Elbow method for KMeans')
plt.show()
Out[73]:
[<matplotlib.lines.Line2D at 0x1ef6210cef0>]

K=2

In [74]:
# Fit the final clustering with the chosen k = 2; KMeans.fit returns the
# fitted estimator, so construction and fitting chain into one statement.
kmeans_ch = KMeans(n_clusters=2, random_state=0, n_init=10).fit(X)
Out[74]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [75]:
# Hard cluster assignment (0/1) for every row of X.
kmeans_ch.labels_
Out[75]:
array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1,
       1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1,
       1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
       1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1,
       1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0,
       0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1])
In [76]:
# Cluster id for each row; on the same data KMeans was fitted on this matches
# kmeans_ch.labels_ (compare Out[75] and Out[76]).
clusters_ch = kmeans_ch.predict(X)
clusters_ch
Out[76]:
array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
       1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1,
       1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1,
       1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
       1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1,
       1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0,
       0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1])
In [77]:
# Attach the cluster id and the target label to the feature frame so clusters
# can be cross-tabulated against 'chosen' below.
# NOTE(review): this mutates X in place — after this cell X is no longer a
# pure feature matrix; downstream cells (In[78]/In[79]) rely on that.
X.loc[:,'Cluster'] = clusters_ch
X.loc[:,'chosen'] = list(y)
In [78]:
# Display the frame again — it now carries the extra 'Cluster' and 'chosen' columns.
X
Out[78]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12 Cluster chosen
0 -0.369691 -0.881824 -0.095656 -0.923999 -0.030645 -0.834931 -1.031650 -0.840942 -0.677716 1.084098 -1.064999 -1.156623 0 0
1 -0.175875 -0.403800 -0.657709 -0.201259 1.691433 -0.672783 -0.119944 -0.440080 0.339906 1.084098 0.504608 0.931676 0 0
2 0.894452 -0.189794 1.959063 0.169276 -0.403611 -1.036954 1.447615 -0.340767 -0.846170 -0.515065 -0.699878 0.032355 0 0
3 0.060782 -0.392075 0.826233 -0.048480 1.789786 -0.552163 0.121028 -0.111355 0.220614 1.084098 0.073241 1.176257 0 0
4 -1.116536 -0.923742 -1.238971 -0.919598 0.313068 -1.160111 1.316032 -0.700013 -1.600210 1.084098 -1.072155 1.270095 0 0
5 -1.082752 -0.067472 -1.142511 -0.923999 0.389201 -1.061180 2.067035 -0.269819 -1.620482 -0.468180 -0.962315 1.853511 0 0
6 -1.202528 -0.776453 -1.135820 -0.773268 -0.409932 -0.832224 -0.631931 -0.119492 1.620288 0.162930 -1.063358 0.001987 0 0
7 -1.206944 0.500703 -0.243295 -0.611928 0.262760 -0.855004 0.001441 -1.579225 0.245997 1.084098 -1.197067 -0.681282 0 0
8 -0.982006 0.421831 0.984997 -0.550391 0.215104 -1.100712 2.248321 -0.336428 1.282109 1.084098 -0.061910 0.480931 0 0
9 1.703175 1.029234 1.508859 0.964653 2.164165 0.482073 -0.794175 0.729102 0.721867 0.439251 0.198086 1.778147 0 0
10 1.528139 0.898498 0.923889 0.634045 2.016059 0.674138 -0.430188 0.558129 1.200855 0.618822 0.291110 1.853511 0 0
11 0.334361 -0.301383 -0.450307 -0.470199 -0.977542 0.863046 -0.396657 -0.882307 0.259614 0.809320 1.750633 0.245556 1 0
12 0.597458 0.773201 0.182265 0.104921 0.580017 0.644184 1.433111 1.735353 -0.712181 -0.818426 -0.129551 -0.039236 0 0
13 -0.493625 1.341798 -0.632970 2.666081 2.015672 1.537362 2.432631 0.500840 1.337627 1.084098 -0.069418 1.097765 0 0
14 0.962230 0.028408 0.059003 -0.233385 1.425585 1.226062 1.160066 1.604723 0.272753 1.084098 0.709073 1.356660 0 0
15 -0.843247 0.160055 1.959063 -0.354971 -0.294051 -0.485118 -0.796417 -0.212355 -0.168152 -0.782723 -0.232169 -0.234956 0 0
16 -0.256403 1.322075 1.520118 0.907583 1.032003 0.577931 0.414295 0.551597 0.783033 1.084098 0.088744 1.848078 0 0
17 -0.456352 2.142046 1.959063 1.734877 1.472675 0.985568 0.646614 0.230853 0.051480 -1.122573 -0.594948 0.587848 0 0
18 -0.637040 -1.030219 -1.165495 -0.606800 2.164165 -1.161299 -0.882343 1.727762 -1.021803 -1.420384 -1.197067 -1.157909 0 0
19 -1.204564 -1.022455 -0.683305 -0.923999 -0.935167 -0.988596 -0.688535 1.735353 -0.372860 -1.581696 -1.197067 -0.315002 0 0
20 -1.077420 -0.314272 1.036699 -0.850174 -0.071202 -1.104286 0.102157 -0.867378 -0.464391 0.591092 -1.048902 1.853510 0 0
21 -0.494728 -1.023105 -0.383945 -0.918858 0.498899 -1.042513 -0.072226 -0.187025 -0.589871 1.084098 -0.986967 0.537859 0 0
22 -0.525116 0.187277 1.220635 -0.221678 0.589822 -1.115053 0.421737 0.656325 -0.098846 1.084098 -0.740109 1.853156 0 0
23 -0.282675 0.571926 -0.333097 2.860439 1.250860 1.434107 2.006573 0.362041 1.620288 0.896295 -0.274184 1.257508 0 0
24 1.898264 1.240876 0.302806 -0.427292 -0.709276 -0.159183 -0.474972 0.273501 -0.789908 -0.085745 0.868592 -0.492577 1 0
25 0.449834 -0.315494 -0.187659 -0.426842 -0.875213 0.916315 -0.212134 -0.503325 0.251438 0.499009 1.750633 0.286629 1 0
26 -0.745212 -0.457525 -0.261214 1.653228 0.471865 -0.532478 3.048431 -0.344057 0.968569 -1.690268 0.186111 -0.576484 1 0
27 -1.146960 2.737908 0.205319 -0.915593 1.224042 -1.080890 2.432039 -1.475399 1.443033 -0.136290 -1.161316 1.353237 0 0
28 -0.006670 2.177984 -0.364684 -0.232426 2.164165 -0.501370 0.481222 -0.462802 -0.495909 0.200246 -0.925963 -0.127350 0 0
29 -1.198149 1.245381 1.824748 -0.544122 2.094805 -0.986543 1.141795 -1.276929 1.620288 0.772653 -1.183303 -0.633811 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
225 -0.850302 -0.813520 -1.238258 0.389340 -0.820553 -0.147948 -0.867381 -1.187728 1.620288 -0.228859 0.676545 -1.166259 1 1
226 1.113887 -0.637363 -0.178882 -0.867049 -0.180374 -0.059762 -1.031280 1.075930 1.620288 -0.831212 -0.536781 -0.852284 1 1
227 -0.939216 -1.028651 -1.083821 0.671288 -1.104197 0.395233 -0.935964 -0.545426 -0.734897 -1.634988 1.750633 -0.991450 1 1
228 0.323318 -0.538560 1.959063 0.469737 -0.928167 0.315432 -0.838357 0.553622 -0.985928 -0.404420 0.017683 -0.868443 1 1
229 0.651351 0.768776 1.651644 0.613579 -0.672599 0.778338 -0.288893 1.735353 -0.698435 -0.803492 0.749774 -0.176628 1 1
230 1.208263 0.590562 -0.856200 -0.037362 -0.890995 0.154056 -0.224548 0.060386 1.620288 -0.577590 1.081262 0.137052 1 1
231 1.629732 -0.424079 -0.793853 -0.297522 0.876511 -0.502033 -0.540340 1.735353 1.179917 0.331082 -0.186926 0.043014 0 1
232 1.097162 0.374225 -0.897801 -0.315126 0.016346 0.367805 -0.397202 0.034201 0.736455 1.084098 1.111692 0.505493 1 1
233 1.452404 -0.818664 1.001952 0.094142 -1.157451 -0.673136 -1.031650 -0.681166 -1.578572 -1.040029 1.750633 -1.088779 1 1
234 0.687938 0.363684 -0.077785 1.495170 -0.946513 0.162091 -0.875163 -0.486609 -0.657582 -0.728212 1.750633 0.166071 1 1
235 -0.511818 -1.019067 1.937312 -0.923999 -1.199069 -1.157422 1.424573 -1.506879 -0.941792 1.084098 -0.598433 1.497327 0 1
236 -0.649452 1.110585 0.023607 -0.619494 -0.351503 -0.377758 0.774664 -1.037327 -0.008921 1.084098 -0.300447 -0.799951 0 1
237 -0.675917 0.864345 1.959063 -0.702632 0.767520 -0.242236 -0.320118 -0.889868 -0.543499 0.953653 -0.295863 0.184530 0 1
238 -0.820946 0.480728 -0.348445 1.706293 -0.634861 -0.548325 -0.658463 -1.446347 -0.595881 -1.346009 1.750633 -0.891882 1 1
239 0.335654 -0.570366 0.440736 2.255028 -0.777152 -0.336048 -0.104033 0.504513 -0.304387 -0.899407 1.750633 0.586226 1 1
240 0.772849 -0.288034 0.998235 2.707124 0.694491 1.600236 -0.599878 0.863164 1.620288 -0.793363 1.076398 0.313476 1 1
241 -0.725775 -0.253169 -1.058923 -0.019515 -0.367824 -0.703472 0.163373 0.093846 1.620288 0.712877 -0.410881 0.776774 0 1
242 -0.883133 0.370675 -1.102573 0.232760 -1.047240 -0.968298 0.151912 -0.346068 1.620288 -0.219994 -0.255762 0.629036 0 1
243 -0.504299 -0.158035 -0.129250 0.170764 0.127833 -0.424841 0.960604 0.067663 1.620288 0.679838 0.005478 0.547596 0 1
244 -0.829496 0.743464 1.959063 0.266679 0.653670 -1.095468 1.900161 1.007911 -0.442746 0.870766 -0.246038 1.728101 0 1
245 -1.203285 0.455171 0.496797 -0.873183 0.629642 -1.113864 1.271226 0.542153 -0.636367 1.084098 -1.140705 0.222521 0 1
246 -1.122600 -0.442839 1.824660 -0.819762 1.050840 -1.048446 1.937596 0.131208 -0.474964 0.536040 -0.800922 1.853511 0 1
247 -1.206944 -0.795159 -1.159900 -0.187089 0.233445 2.452569 0.183293 1.253589 -1.394630 0.753640 -1.083641 -1.163597 0 1
248 -0.501382 -1.030219 -0.612979 -0.923999 -0.608020 -0.863086 -0.124132 -0.944272 -1.081271 1.084098 -1.170461 -1.244399 0 1
249 -0.704093 -0.985145 1.199508 -0.321552 2.164165 -0.193249 -0.295412 -0.394034 0.032818 -1.672473 -0.724683 -1.067831 0 1
250 0.831957 -0.173367 1.636565 1.345345 -0.989257 0.826135 -0.824412 -0.072225 -0.255975 -1.005500 1.750633 -1.067130 1 1
251 -0.895156 -1.022380 -0.410545 2.237273 -1.199069 0.687783 -1.006468 -0.694581 -1.311235 -1.270219 1.750633 -1.233006 1 1
252 -0.546320 -0.631883 -0.800789 -0.187107 -1.179353 0.464606 -0.905192 -0.113592 0.167123 -0.843254 1.750633 -1.148945 1 1
253 -0.591349 -0.947758 -0.915262 -0.579179 0.089961 0.047756 -0.905554 -1.413215 -1.184716 1.084098 0.384684 -1.035788 0 1
254 -0.072947 -0.912155 0.150662 -0.246506 -1.198328 -0.779047 -1.031650 0.022522 -1.604883 -1.609572 1.750632 -1.247525 1 1

255 rows × 14 columns

In [79]:
# Cross-tabulate cluster vs. chosen and draw a stacked bar chart: each bar is
# a cluster, split by how many chosen / not-chosen tracks fall into it.
# Naming the size column explicitly avoids the fragile integer column label 0.
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index(name='count')
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values='count')
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[79]:
<matplotlib.axes._subplots.AxesSubplot at 0x1ef62148208>
In [80]:
# Render the current company's name as a level-2 markdown heading.
from IPython.display import display, Markdown, Latex

header_md = Markdown('## ' + companies[2])
display(header_md)

Gramma

ANN

In [435]:
# Standardized chromagram features for company index 2 ("Gramma" per the
# markdown header above). Rebinds X, replacing the clustering frame.
X = df_n_ps_std_ch[2]
In [436]:
# Binary target column 'chosen' (0/1) for the same company.
y = df_n_ps[2]['chosen']
In [437]:
# Hold out a test set (default 75/25). random_state pins the split so a fresh
# kernel reproduces the same partition; without it every rerun differs.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
In [438]:
# Sanity check: 231 training rows x 12 features.
X_train.shape
Out[438]:
(231, 12)
In [85]:
# Base estimator for the grid search; hidden_layer_sizes here is only a
# placeholder — the parameter grid below overrides it.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [86]:
# Candidate hyperparameter values for the MLP grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
# One- to three-layer architectures of 10-30 units each.
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but left out of the grid below (commented there) to bound runtime.
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [87]:
# Time the grid search so its total cost is visible to the reader.
import time
start = time.time() # current time in seconds since Jan 1, 1970 (the epoch reference point)

# Fix the RNG so the stochastic parts of the search are repeatable.
np.random.seed(1234)
# Hyperparameter grid for the MLP; batch_size is excluded to bound runtime.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track both kappa and accuracy; the best model is refit on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): iid=True was deprecated and later removed from scikit-learn,
# so this cell needs an older sklearn version to run — confirm the pinned env.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [88]:
# Run the exhaustive search — the expensive cell (~30 minutes per the log).
grid.fit(X_train, y_train)

# (Message in Spanish: "the best model's parameters were {0}, giving an
# accuracy of {1}% and a kappa of {2}".)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # time after model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (20, 10), 'learning_rate_init': 0.003, 'max_iter': 100}, que permiten obtener un Accuracy de 81.39% y un Kappa del 20.50
Tiempo total: 29.47 minutos
In [439]:
# Translate the grid-search architecture into the layer-size list used to
# build the Keras model: hidden sizes [20, 10] plus one sigmoid output unit.
n0 = X_train.shape[1]  # number of input features
# Pin the architecture explicitly (matches the best_params_ reported above).
grid.best_params_['hidden_layer_sizes'] = [20, 10]
### hidden_layer_sizes
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]
# Learning rate and epoch budget chosen by the grid search.
lr = 0.003
epochs = 100
In [440]:
# Keras functional-API input layer; n0 = 12 features (see model.summary below).
input_tensor = Input(shape = (n0,))
In [441]:
# Stack the hidden Dense layers; each layer consumes the previous output.
hidden_outputs = [input_tensor]
for size in ns[:-1]:
    hidden_outputs.append(Dense(size, activation='tanh')(hidden_outputs[-1]))

# Single sigmoid unit (ns[-1] == 1) for binary classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
In [442]:
# Wrap the tensors into a trainable Model and stash the freshly initialized
# weights so training can later be restarted from the same starting point.
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [443]:
# Architecture sanity check: 12 -> 20 -> 10 -> 1, 481 trainable parameters.
model.summary()
Model: "model_23"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_23 (InputLayer)        (None, 12)                0         
_________________________________________________________________
dense_72 (Dense)             (None, 20)                260       
_________________________________________________________________
dense_73 (Dense)             (None, 10)                210       
_________________________________________________________________
dense_74 (Dense)             (None, 1)                 11        
=================================================================
Total params: 481
Trainable params: 481
Non-trainable params: 0
_________________________________________________________________
In [444]:
# Restore the untrained initial weights so this cell is idempotent on rerun,
# then compile and train with the hyperparameters chosen by the grid search.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy fails to improve by
# at least 0.01 for 10 consecutive epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 231 samples, validate on 78 samples
Epoch 1/100
231/231 [==============================] - 0s 805us/step - loss: 0.6917 - accuracy: 0.5714 - val_loss: 0.6342 - val_accuracy: 0.6795
Epoch 2/100
231/231 [==============================] - 0s 65us/step - loss: 0.6302 - accuracy: 0.7013 - val_loss: 0.5826 - val_accuracy: 0.7949
Epoch 3/100
231/231 [==============================] - 0s 61us/step - loss: 0.5899 - accuracy: 0.7706 - val_loss: 0.5455 - val_accuracy: 0.8205
Epoch 4/100
231/231 [==============================] - 0s 82us/step - loss: 0.5610 - accuracy: 0.7792 - val_loss: 0.5164 - val_accuracy: 0.8333
Epoch 5/100
231/231 [==============================] - 0s 78us/step - loss: 0.5344 - accuracy: 0.7965 - val_loss: 0.4901 - val_accuracy: 0.8333
Epoch 6/100
231/231 [==============================] - 0s 74us/step - loss: 0.5117 - accuracy: 0.7879 - val_loss: 0.4738 - val_accuracy: 0.8205
Epoch 7/100
231/231 [==============================] - 0s 56us/step - loss: 0.4959 - accuracy: 0.7792 - val_loss: 0.4605 - val_accuracy: 0.8333
Epoch 8/100
231/231 [==============================] - 0s 61us/step - loss: 0.4828 - accuracy: 0.7879 - val_loss: 0.4486 - val_accuracy: 0.8205
Epoch 9/100
231/231 [==============================] - 0s 52us/step - loss: 0.4786 - accuracy: 0.7879 - val_loss: 0.4402 - val_accuracy: 0.8333
Epoch 10/100
231/231 [==============================] - 0s 61us/step - loss: 0.4704 - accuracy: 0.7879 - val_loss: 0.4366 - val_accuracy: 0.8333
Epoch 11/100
231/231 [==============================] - 0s 52us/step - loss: 0.4685 - accuracy: 0.7879 - val_loss: 0.4392 - val_accuracy: 0.8333
Epoch 12/100
231/231 [==============================] - 0s 61us/step - loss: 0.4619 - accuracy: 0.7922 - val_loss: 0.4381 - val_accuracy: 0.8333
Epoch 13/100
231/231 [==============================] - 0s 61us/step - loss: 0.4587 - accuracy: 0.7922 - val_loss: 0.4367 - val_accuracy: 0.8333
Epoch 14/100
231/231 [==============================] - 0s 61us/step - loss: 0.4537 - accuracy: 0.7922 - val_loss: 0.4337 - val_accuracy: 0.8333

Epoch 00014: ReduceLROnPlateau reducing learning rate to 0.001500000013038516.
Epoch 15/100
231/231 [==============================] - 0s 56us/step - loss: 0.4488 - accuracy: 0.8009 - val_loss: 0.4314 - val_accuracy: 0.8333
Epoch 16/100
231/231 [==============================] - 0s 56us/step - loss: 0.4475 - accuracy: 0.7965 - val_loss: 0.4300 - val_accuracy: 0.8462
Epoch 17/100
231/231 [==============================] - 0s 61us/step - loss: 0.4476 - accuracy: 0.7965 - val_loss: 0.4287 - val_accuracy: 0.8462
Epoch 18/100
231/231 [==============================] - 0s 61us/step - loss: 0.4468 - accuracy: 0.7922 - val_loss: 0.4269 - val_accuracy: 0.8462
Epoch 19/100
231/231 [==============================] - 0s 61us/step - loss: 0.4428 - accuracy: 0.7879 - val_loss: 0.4270 - val_accuracy: 0.8462
Epoch 20/100
231/231 [==============================] - 0s 65us/step - loss: 0.4409 - accuracy: 0.7879 - val_loss: 0.4263 - val_accuracy: 0.8462
Epoch 21/100
231/231 [==============================] - 0s 100us/step - loss: 0.4385 - accuracy: 0.7922 - val_loss: 0.4275 - val_accuracy: 0.8333
Epoch 22/100
231/231 [==============================] - 0s 65us/step - loss: 0.4384 - accuracy: 0.7965 - val_loss: 0.4277 - val_accuracy: 0.8333
Epoch 23/100
231/231 [==============================] - 0s 61us/step - loss: 0.4364 - accuracy: 0.7965 - val_loss: 0.4266 - val_accuracy: 0.8333
Epoch 24/100
231/231 [==============================] - 0s 69us/step - loss: 0.4345 - accuracy: 0.7922 - val_loss: 0.4261 - val_accuracy: 0.8333
Epoch 25/100
231/231 [==============================] - 0s 61us/step - loss: 0.4323 - accuracy: 0.7835 - val_loss: 0.4240 - val_accuracy: 0.8462
Epoch 26/100
231/231 [==============================] - 0s 69us/step - loss: 0.4331 - accuracy: 0.7879 - val_loss: 0.4231 - val_accuracy: 0.8462

Epoch 00026: ReduceLROnPlateau reducing learning rate to 0.000750000006519258.
Epoch 27/100
231/231 [==============================] - 0s 61us/step - loss: 0.4299 - accuracy: 0.7879 - val_loss: 0.4234 - val_accuracy: 0.8462
Epoch 28/100
231/231 [==============================] - 0s 65us/step - loss: 0.4289 - accuracy: 0.7879 - val_loss: 0.4237 - val_accuracy: 0.8462
Epoch 29/100
231/231 [==============================] - 0s 65us/step - loss: 0.4275 - accuracy: 0.7922 - val_loss: 0.4241 - val_accuracy: 0.8462
Epoch 30/100
231/231 [==============================] - 0s 65us/step - loss: 0.4270 - accuracy: 0.7965 - val_loss: 0.4253 - val_accuracy: 0.8462
Epoch 31/100
231/231 [==============================] - 0s 56us/step - loss: 0.4262 - accuracy: 0.8052 - val_loss: 0.4270 - val_accuracy: 0.8462
Epoch 32/100
231/231 [==============================] - 0s 61us/step - loss: 0.4259 - accuracy: 0.8095 - val_loss: 0.4282 - val_accuracy: 0.8462
Epoch 33/100
231/231 [==============================] - 0s 61us/step - loss: 0.4248 - accuracy: 0.8095 - val_loss: 0.4278 - val_accuracy: 0.8462
Epoch 34/100
231/231 [==============================] - 0s 61us/step - loss: 0.4242 - accuracy: 0.8052 - val_loss: 0.4274 - val_accuracy: 0.8333
Epoch 35/100
231/231 [==============================] - 0s 61us/step - loss: 0.4231 - accuracy: 0.8009 - val_loss: 0.4269 - val_accuracy: 0.8333
Epoch 36/100
231/231 [==============================] - 0s 69us/step - loss: 0.4227 - accuracy: 0.8009 - val_loss: 0.4261 - val_accuracy: 0.8333

Epoch 00036: ReduceLROnPlateau reducing learning rate to 0.000375000003259629.
Epoch 37/100
231/231 [==============================] - 0s 56us/step - loss: 0.4215 - accuracy: 0.8009 - val_loss: 0.4259 - val_accuracy: 0.8333
Epoch 38/100
231/231 [==============================] - 0s 78us/step - loss: 0.4210 - accuracy: 0.8009 - val_loss: 0.4253 - val_accuracy: 0.8333
Epoch 39/100
231/231 [==============================] - 0s 87us/step - loss: 0.4205 - accuracy: 0.8009 - val_loss: 0.4246 - val_accuracy: 0.8333
Epoch 40/100
231/231 [==============================] - 0s 65us/step - loss: 0.4196 - accuracy: 0.8009 - val_loss: 0.4241 - val_accuracy: 0.8333
Epoch 41/100
231/231 [==============================] - 0s 65us/step - loss: 0.4193 - accuracy: 0.8052 - val_loss: 0.4236 - val_accuracy: 0.8333
Epoch 42/100
231/231 [==============================] - 0s 78us/step - loss: 0.4188 - accuracy: 0.8052 - val_loss: 0.4235 - val_accuracy: 0.8333
Epoch 43/100
231/231 [==============================] - 0s 74us/step - loss: 0.4184 - accuracy: 0.8052 - val_loss: 0.4243 - val_accuracy: 0.8333
Epoch 44/100
231/231 [==============================] - 0s 65us/step - loss: 0.4180 - accuracy: 0.8052 - val_loss: 0.4244 - val_accuracy: 0.8333
Epoch 45/100
231/231 [==============================] - 0s 65us/step - loss: 0.4177 - accuracy: 0.8009 - val_loss: 0.4237 - val_accuracy: 0.8333
Epoch 46/100
231/231 [==============================] - 0s 65us/step - loss: 0.4174 - accuracy: 0.8009 - val_loss: 0.4235 - val_accuracy: 0.8205

Epoch 00046: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145.
Epoch 47/100
231/231 [==============================] - 0s 65us/step - loss: 0.4170 - accuracy: 0.8009 - val_loss: 0.4232 - val_accuracy: 0.8205
Epoch 48/100
231/231 [==============================] - 0s 69us/step - loss: 0.4166 - accuracy: 0.8009 - val_loss: 0.4231 - val_accuracy: 0.8205
Epoch 49/100
231/231 [==============================] - 0s 65us/step - loss: 0.4164 - accuracy: 0.8009 - val_loss: 0.4232 - val_accuracy: 0.8205
Epoch 50/100
231/231 [==============================] - 0s 65us/step - loss: 0.4161 - accuracy: 0.8009 - val_loss: 0.4231 - val_accuracy: 0.8205
Epoch 51/100
231/231 [==============================] - 0s 61us/step - loss: 0.4159 - accuracy: 0.8009 - val_loss: 0.4232 - val_accuracy: 0.8333
Epoch 52/100
231/231 [==============================] - 0s 61us/step - loss: 0.4158 - accuracy: 0.8052 - val_loss: 0.4233 - val_accuracy: 0.8333
Epoch 53/100
231/231 [==============================] - 0s 61us/step - loss: 0.4155 - accuracy: 0.8052 - val_loss: 0.4236 - val_accuracy: 0.8205
Epoch 54/100
231/231 [==============================] - 0s 56us/step - loss: 0.4153 - accuracy: 0.8052 - val_loss: 0.4236 - val_accuracy: 0.8205
Epoch 55/100
231/231 [==============================] - 0s 61us/step - loss: 0.4151 - accuracy: 0.8009 - val_loss: 0.4235 - val_accuracy: 0.8205
Epoch 56/100
231/231 [==============================] - 0s 61us/step - loss: 0.4149 - accuracy: 0.8009 - val_loss: 0.4235 - val_accuracy: 0.8205

Epoch 00056: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05.
Epoch 57/100
231/231 [==============================] - 0s 61us/step - loss: 0.4147 - accuracy: 0.8009 - val_loss: 0.4236 - val_accuracy: 0.8205
Epoch 58/100
231/231 [==============================] - 0s 91us/step - loss: 0.4146 - accuracy: 0.8009 - val_loss: 0.4235 - val_accuracy: 0.8205
Epoch 59/100
231/231 [==============================] - 0s 61us/step - loss: 0.4145 - accuracy: 0.8009 - val_loss: 0.4235 - val_accuracy: 0.8205
Epoch 60/100
231/231 [==============================] - 0s 87us/step - loss: 0.4144 - accuracy: 0.8052 - val_loss: 0.4234 - val_accuracy: 0.8205
Epoch 61/100
231/231 [==============================] - 0s 65us/step - loss: 0.4143 - accuracy: 0.8052 - val_loss: 0.4234 - val_accuracy: 0.8205
Epoch 62/100
231/231 [==============================] - 0s 78us/step - loss: 0.4142 - accuracy: 0.8009 - val_loss: 0.4233 - val_accuracy: 0.8205
Epoch 63/100
231/231 [==============================] - 0s 78us/step - loss: 0.4141 - accuracy: 0.8052 - val_loss: 0.4231 - val_accuracy: 0.8333
Epoch 64/100
231/231 [==============================] - 0s 69us/step - loss: 0.4140 - accuracy: 0.8052 - val_loss: 0.4229 - val_accuracy: 0.8333
Epoch 65/100
231/231 [==============================] - 0s 95us/step - loss: 0.4138 - accuracy: 0.8052 - val_loss: 0.4228 - val_accuracy: 0.8333
Epoch 66/100
231/231 [==============================] - 0s 100us/step - loss: 0.4137 - accuracy: 0.8052 - val_loss: 0.4229 - val_accuracy: 0.8333

Epoch 00066: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05.
Epoch 67/100
231/231 [==============================] - 0s 87us/step - loss: 0.4137 - accuracy: 0.8052 - val_loss: 0.4228 - val_accuracy: 0.8333
Epoch 68/100
231/231 [==============================] - 0s 95us/step - loss: 0.4136 - accuracy: 0.8052 - val_loss: 0.4227 - val_accuracy: 0.8333
Epoch 69/100
231/231 [==============================] - 0s 82us/step - loss: 0.4136 - accuracy: 0.8052 - val_loss: 0.4227 - val_accuracy: 0.8333
Epoch 70/100
231/231 [==============================] - 0s 82us/step - loss: 0.4135 - accuracy: 0.8052 - val_loss: 0.4226 - val_accuracy: 0.8333
Epoch 71/100
231/231 [==============================] - 0s 82us/step - loss: 0.4134 - accuracy: 0.8052 - val_loss: 0.4225 - val_accuracy: 0.8333
Epoch 72/100
231/231 [==============================] - 0s 78us/step - loss: 0.4134 - accuracy: 0.8052 - val_loss: 0.4224 - val_accuracy: 0.8333
Epoch 73/100
231/231 [==============================] - 0s 82us/step - loss: 0.4133 - accuracy: 0.8009 - val_loss: 0.4224 - val_accuracy: 0.8333
Epoch 74/100
231/231 [==============================] - 0s 74us/step - loss: 0.4133 - accuracy: 0.8009 - val_loss: 0.4224 - val_accuracy: 0.8205
Epoch 75/100
231/231 [==============================] - 0s 121us/step - loss: 0.4132 - accuracy: 0.8009 - val_loss: 0.4224 - val_accuracy: 0.8205
Epoch 76/100
231/231 [==============================] - 0s 78us/step - loss: 0.4132 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205

Epoch 00076: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05.
Epoch 77/100
231/231 [==============================] - 0s 82us/step - loss: 0.4131 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205
Epoch 78/100
231/231 [==============================] - 0s 82us/step - loss: 0.4131 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 79/100
231/231 [==============================] - 0s 87us/step - loss: 0.4131 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 80/100
231/231 [==============================] - 0s 74us/step - loss: 0.4130 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 81/100
231/231 [==============================] - 0s 69us/step - loss: 0.4130 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 82/100
231/231 [==============================] - 0s 82us/step - loss: 0.4130 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 83/100
231/231 [==============================] - 0s 87us/step - loss: 0.4129 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205
Epoch 84/100
231/231 [==============================] - 0s 82us/step - loss: 0.4129 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205
Epoch 85/100
231/231 [==============================] - 0s 95us/step - loss: 0.4129 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205
Epoch 86/100
231/231 [==============================] - 0s 91us/step - loss: 0.4129 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205

Epoch 00086: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05.
Epoch 87/100
231/231 [==============================] - 0s 95us/step - loss: 0.4128 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205
Epoch 88/100
231/231 [==============================] - 0s 104us/step - loss: 0.4128 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205
Epoch 89/100
231/231 [==============================] - 0s 78us/step - loss: 0.4128 - accuracy: 0.8009 - val_loss: 0.4223 - val_accuracy: 0.8205
Epoch 90/100
231/231 [==============================] - 0s 95us/step - loss: 0.4128 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 91/100
231/231 [==============================] - 0s 78us/step - loss: 0.4128 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 92/100
231/231 [==============================] - 0s 91us/step - loss: 0.4128 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 93/100
231/231 [==============================] - 0s 78us/step - loss: 0.4128 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 94/100
231/231 [==============================] - 0s 78us/step - loss: 0.4127 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 95/100
231/231 [==============================] - 0s 82us/step - loss: 0.4127 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 96/100
231/231 [==============================] - 0s 78us/step - loss: 0.4127 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205

Epoch 00096: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06.
Epoch 97/100
231/231 [==============================] - 0s 74us/step - loss: 0.4127 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 98/100
231/231 [==============================] - 0s 74us/step - loss: 0.4127 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 99/100
231/231 [==============================] - 0s 78us/step - loss: 0.4127 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
Epoch 100/100
231/231 [==============================] - 0s 91us/step - loss: 0.4127 - accuracy: 0.8009 - val_loss: 0.4222 - val_accuracy: 0.8205
In [445]:
# Learning curves for the fitted model: accuracy and loss per epoch, for both
# the training data and the validation split (taken from history.history).
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x value per recorded epoch (0-based).
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
range(0, 100)
In [446]:
# Final evaluation of the trained network on the held-out test set.
test_loss, test_acc = model.evaluate(X_test, y_test)
message = "test loss: {}, test accuracy: {}"
print(message.format(test_loss, test_acc))
78/78 [==============================] - 0s 64us/step
test loss: 0.42215611384465146, test accuracy: 0.8205128312110901
In [447]:
# Score the class-probability predictions with ROC AUC (threshold-free metric).
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.7087053571428572
In [448]:
# Binarize the predicted probabilities at the conventional 0.5 cut-off,
# then score agreement with the true labels using Cohen's kappa.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  0.2155172413793104

KMeans

In [100]:
X
Out[100]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12
0 1.984589 -0.654155 -0.792745 -0.859479 0.177345 -0.665615 -0.552507 0.352420 -0.300142 0.769752 -0.504858 0.181350
1 -0.102317 1.607758 0.006886 -0.548179 0.490499 -0.344630 0.877114 -0.585463 1.429327 0.172897 0.202254 1.373180
2 -0.439474 2.028691 -0.353938 2.078331 -1.039184 -0.467977 1.821419 -1.268548 -0.701337 -1.697571 -0.326854 0.472916
3 -0.645076 0.109508 -0.651900 1.301219 -0.609256 -0.080210 1.087784 0.361091 1.429328 -0.614952 0.382530 0.101301
4 -0.301057 0.336491 1.322422 0.959162 -0.936732 -0.362802 1.110344 0.010511 1.429327 -0.501805 0.442290 -0.322884
5 -1.259648 0.791285 0.394989 -0.842002 0.272018 -0.902333 1.319181 -0.909587 0.270086 1.385370 -1.157401 -0.786117
6 -0.473744 0.544499 0.562651 -0.121769 0.552950 -0.248724 0.899872 0.460196 1.152068 1.385371 -0.774641 -0.707276
7 -0.774266 0.899101 0.080156 -0.046730 0.386542 -0.597906 0.929468 0.498309 1.429327 0.480853 -0.478808 -0.428642
8 1.984589 -0.912964 1.574884 -0.842897 -0.520036 -0.919448 -0.932145 1.867087 -0.601601 -0.679714 -0.985387 -0.175334
9 1.984589 -0.409991 1.175249 -0.543262 -0.453426 -0.170551 -0.874287 0.438503 -0.647564 0.510081 -0.815886 -0.260353
10 1.984589 -1.189009 -0.443729 -1.134391 -0.469034 -0.729802 -1.117639 -0.299959 -1.382187 0.098391 -1.243653 -1.042776
11 1.425631 -0.405095 1.943010 -0.275254 -0.249374 0.219391 -0.780272 2.124774 -0.587222 -0.149453 -0.440426 -0.501966
12 -0.175927 -1.348663 -0.477174 -1.254474 -0.853911 2.667488 -1.049255 -0.222134 -1.773507 -0.580300 -0.908675 -1.449154
13 -0.085508 -0.457470 0.184100 -0.334355 -0.765637 1.103120 -0.823425 2.124774 -0.182534 -0.465675 1.179407 -0.204100
14 0.250965 2.028690 -1.244012 0.617467 -0.882089 2.044217 1.871164 -1.088962 -0.995141 -1.735168 0.105923 -1.470599
15 0.541805 -0.001425 -0.925880 0.238435 -0.575240 2.198189 -0.519574 -0.694298 0.048619 -1.297201 1.668396 -0.729377
16 1.075493 1.812588 -0.957979 -0.104324 -0.731780 1.782624 -0.873902 -0.910773 0.252506 -1.403241 1.668396 -0.957208
17 1.162343 -1.410603 1.107861 -1.278878 0.652589 -1.189878 -0.607570 2.124774 -1.773507 -1.635072 -1.485428 0.776204
18 -0.350570 -0.306746 0.672887 -0.541074 -0.262104 -0.270106 -0.268765 2.124774 -0.322479 0.633526 0.782517 0.912903
19 1.984589 -1.166218 -0.302121 -0.837423 1.444010 -0.977651 -0.769393 0.971765 -1.024957 0.065328 -0.751049 1.582401
20 -0.344429 0.397628 0.492928 0.088349 1.259038 -0.375019 0.861329 0.835025 0.519729 1.385370 0.302340 1.587608
21 -0.665123 -0.712314 -0.800151 -0.469685 0.600390 -0.337541 0.573835 0.311324 0.473167 1.385371 -0.000823 0.495499
22 -0.034726 0.631798 1.392275 0.218558 0.576512 -0.244433 1.034652 0.817298 0.794999 1.129188 -0.194205 1.762490
23 1.984589 -0.007008 1.098575 0.079477 0.064847 1.686726 0.117647 1.534220 -0.572812 0.064077 1.117174 0.106404
24 0.863870 -0.225984 0.529571 -0.208265 -0.709105 0.215012 -0.123712 2.124775 0.286024 0.404840 0.142872 0.545621
25 1.984589 -0.637932 0.518394 -0.066070 0.047258 1.915570 -0.370971 1.598931 -0.707575 -0.227660 0.935568 -0.410777
26 -0.534296 -0.293829 -0.533721 2.591888 -0.459134 0.157203 -0.637435 -0.966788 -1.041968 -1.536181 1.668394 -1.235485
27 -1.305187 -1.249514 -0.918547 0.558531 -0.810228 -0.527321 -0.867245 1.052965 1.429326 -0.486176 -1.156630 -1.447430
28 -0.702071 2.028691 -0.850117 0.070237 1.467057 -0.296061 1.830975 -0.993617 0.591063 0.197858 -0.843315 1.002836
29 0.432566 0.860284 -1.227202 0.670559 -1.159673 0.418544 -0.996449 -0.930246 0.373902 -1.168053 1.668396 -1.329085
... ... ... ... ... ... ... ... ... ... ... ... ...
279 -1.348839 0.139880 -1.253885 -1.275537 -0.280640 -1.141193 -0.892432 -1.344572 1.429327 0.195292 -1.485428 -1.377643
280 -0.179872 2.028690 -0.263275 -0.310922 1.069421 -0.450122 2.019519 -0.993600 -0.104343 0.791611 -0.318981 0.603525
281 0.469201 -0.567649 -0.709387 0.607882 -0.765402 -0.261128 -0.801727 0.179241 1.429328 -0.204750 0.800232 -0.581854
282 0.543320 -0.456381 -0.634102 0.325918 -0.664498 -0.167886 -0.687116 -0.079271 1.429328 -0.602810 1.035451 -0.583375
283 0.477794 -0.769583 -0.735746 1.473847 -0.762374 0.780975 -0.774212 0.728189 1.429327 -0.365422 1.597677 -0.544492
284 0.222631 -0.766506 0.927101 -0.670166 1.411449 -0.709842 0.247985 2.124775 -0.967118 0.016153 -0.867689 0.553112
285 1.911041 -1.385949 0.489432 -1.277915 0.744174 -1.189878 -0.732335 2.124775 -1.427934 0.826977 -0.913373 1.545683
286 0.998963 -0.323703 1.401654 -0.268532 1.829759 -0.704029 -0.260658 2.124775 -0.758056 0.093732 -0.465792 1.060927
287 -0.510470 -0.039911 1.963115 -0.642206 0.369415 -0.773032 -0.797277 -0.297193 -0.872803 1.385370 -0.488415 1.418495
288 -0.254650 0.178893 1.558633 -0.039151 0.587666 -0.615370 -0.017793 0.430361 0.049355 1.129887 -0.078385 1.762490
289 -0.808854 -0.341264 1.351164 -0.628500 0.452102 -0.907851 -0.256562 0.160761 -0.607798 0.915654 -0.519960 1.762490
290 1.984589 -0.481795 0.508510 -0.575951 -0.779282 0.435357 -0.690793 0.480685 -0.370618 0.452005 -0.553849 -0.500601
291 0.998287 -0.217204 0.367127 0.044444 -0.456589 0.906812 -0.366517 1.231917 0.553998 1.385370 0.680908 0.240103
292 0.798451 -0.538113 1.041133 -0.029389 -0.595522 0.670339 -0.544354 1.434857 -0.084621 1.385370 -0.330343 -0.433781
293 -0.943702 0.737028 -0.728275 1.208260 -0.076623 -0.869741 -0.566060 -0.607766 1.429327 -0.367929 -0.514004 -0.152881
294 -0.783173 2.028690 -0.694537 0.534761 0.700872 -0.657312 0.593459 -0.865193 0.804282 0.411026 -0.455890 -0.114253
295 -1.353539 0.236507 -1.253885 0.505804 2.532661 -1.137257 2.639114 -1.344572 -0.623826 -1.059108 -1.485217 -0.664024
296 -0.814494 -0.644175 -0.746555 -0.089905 1.458628 -0.329786 -0.180452 -0.280519 -0.556615 1.385371 -0.562439 0.215725
297 -1.328790 -0.867949 -1.235248 -1.143782 -0.824785 -1.189878 0.173069 -1.209548 -0.747695 0.835692 -1.224526 1.762490
298 -0.492926 0.191754 0.111709 -0.525002 1.181452 -0.578483 1.076554 0.005169 -0.283691 1.385370 -0.639443 0.647082
299 -0.170158 1.429318 0.352435 -0.389217 0.358405 0.174620 1.813232 0.463259 1.229264 1.385371 0.316814 0.934409
300 -0.735769 1.192104 0.781909 -0.385227 1.393252 -0.120730 0.406288 -0.281463 0.586122 1.385371 -0.245906 0.425382
301 -0.019554 1.160350 0.498593 -0.197460 2.311770 0.265997 2.684564 -0.481065 -0.409788 0.174802 -0.763379 0.571031
302 1.400364 -1.135263 -1.004589 -0.298465 -1.155209 -0.890738 -0.994393 0.530779 1.429327 -0.891882 0.281780 -1.060312
303 0.689113 -1.410603 -1.223892 2.775857 -1.269849 0.014549 -1.133868 1.868258 0.446129 -1.735168 -0.154717 -1.470599
304 0.922331 -1.397035 -0.464459 0.077595 -1.183685 -0.909669 -1.052482 0.619950 1.429327 -1.524369 -0.354957 -1.435964
305 0.750767 -0.421453 -0.860282 0.506761 -0.768372 0.918258 -0.755944 -0.644496 1.257549 -0.509585 1.668396 -0.681109
306 0.961272 -0.181417 -0.598543 1.120314 -0.794696 0.953980 -0.773921 -0.695204 1.429327 0.092550 1.404805 -0.528387
307 -0.525215 1.812541 -0.767815 -0.363920 1.274741 0.174788 0.980181 -0.048116 -0.124203 1.385370 -0.185668 0.000363
308 -1.312658 -1.394783 1.272388 -1.278878 -1.269849 2.667487 -1.096080 1.886802 -1.773507 1.153659 0.035789 -1.470599

309 rows × 12 columns

In [101]:
# Elbow method: fit k-means for k = 1..14 and record the within-cluster
# sum of squares (inertia) for each k.
WSSs = []
for k in range(1, 15):
    km = KMeans(n_clusters=k, random_state=0).fit(X)
    WSSs.append(km.inertia_)
WSSs
Out[101]:
[3708.0,
 2878.224392689417,
 2409.0313876777413,
 2245.2681343684408,
 2096.2730590487927,
 1980.9081903971983,
 1884.1812168752008,
 1782.3304172674002,
 1692.3482768291478,
 1629.7037390768492,
 1586.8745376225052,
 1530.5845891254487,
 1455.503322263105,
 1429.3496938708681]
In [102]:
# Elbow plot: WSS (inertia) vs. number of clusters; the next markdown
# cell picks k=2 from this curve.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
Out[102]:
[<matplotlib.lines.Line2D at 0x1ef622f5c18>]

K=2

In [103]:
# Fit k-means with k=2 (chosen from the elbow plot above), a fixed seed
# for reproducibility, and 10 random centroid initializations.
kmeans_ch = KMeans(n_clusters=2, random_state=0, n_init=10)
kmeans_ch.fit(X)
Out[103]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [104]:
# Cluster label (0 or 1) assigned to each training sample.
kmeans_ch.labels_
Out[104]:
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
       1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1,
       1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1,
       1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1,
       0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0,
       0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1,
       1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0,
       0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
       0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0,
       0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1,
       0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1,
       1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1,
       1])
In [105]:
# Cluster assignment per row. On the data the model was fitted on this is
# identical to kmeans_ch.labels_ (the two outputs above/below match).
clusters_ch = kmeans_ch.predict(X)
clusters_ch
Out[105]:
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
       1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1,
       1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1,
       1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1,
       0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0,
       0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1,
       1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0,
       0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
       0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0,
       0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1,
       0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1,
       1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1,
       1])
In [106]:
# NOTE(review): this mutates the feature matrix X in place; any later cell
# that re-uses X will see these two extra columns.
X.loc[:,'Cluster'] = clusters_ch
# Presumably y holds the binary 'chosen' target for these rows — confirm
# against the cell where y was defined.
X.loc[:,'chosen'] = list(y)
In [107]:
X
Out[107]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12 Cluster chosen
0 1.984589 -0.654155 -0.792745 -0.859479 0.177345 -0.665615 -0.552507 0.352420 -0.300142 0.769752 -0.504858 0.181350 1 0
1 -0.102317 1.607758 0.006886 -0.548179 0.490499 -0.344630 0.877114 -0.585463 1.429327 0.172897 0.202254 1.373180 1 0
2 -0.439474 2.028691 -0.353938 2.078331 -1.039184 -0.467977 1.821419 -1.268548 -0.701337 -1.697571 -0.326854 0.472916 0 0
3 -0.645076 0.109508 -0.651900 1.301219 -0.609256 -0.080210 1.087784 0.361091 1.429328 -0.614952 0.382530 0.101301 0 0
4 -0.301057 0.336491 1.322422 0.959162 -0.936732 -0.362802 1.110344 0.010511 1.429327 -0.501805 0.442290 -0.322884 0 0
5 -1.259648 0.791285 0.394989 -0.842002 0.272018 -0.902333 1.319181 -0.909587 0.270086 1.385370 -1.157401 -0.786117 1 0
6 -0.473744 0.544499 0.562651 -0.121769 0.552950 -0.248724 0.899872 0.460196 1.152068 1.385371 -0.774641 -0.707276 1 0
7 -0.774266 0.899101 0.080156 -0.046730 0.386542 -0.597906 0.929468 0.498309 1.429327 0.480853 -0.478808 -0.428642 1 0
8 1.984589 -0.912964 1.574884 -0.842897 -0.520036 -0.919448 -0.932145 1.867087 -0.601601 -0.679714 -0.985387 -0.175334 1 0
9 1.984589 -0.409991 1.175249 -0.543262 -0.453426 -0.170551 -0.874287 0.438503 -0.647564 0.510081 -0.815886 -0.260353 1 0
10 1.984589 -1.189009 -0.443729 -1.134391 -0.469034 -0.729802 -1.117639 -0.299959 -1.382187 0.098391 -1.243653 -1.042776 1 0
11 1.425631 -0.405095 1.943010 -0.275254 -0.249374 0.219391 -0.780272 2.124774 -0.587222 -0.149453 -0.440426 -0.501966 1 0
12 -0.175927 -1.348663 -0.477174 -1.254474 -0.853911 2.667488 -1.049255 -0.222134 -1.773507 -0.580300 -0.908675 -1.449154 0 0
13 -0.085508 -0.457470 0.184100 -0.334355 -0.765637 1.103120 -0.823425 2.124774 -0.182534 -0.465675 1.179407 -0.204100 0 0
14 0.250965 2.028690 -1.244012 0.617467 -0.882089 2.044217 1.871164 -1.088962 -0.995141 -1.735168 0.105923 -1.470599 0 0
15 0.541805 -0.001425 -0.925880 0.238435 -0.575240 2.198189 -0.519574 -0.694298 0.048619 -1.297201 1.668396 -0.729377 0 0
16 1.075493 1.812588 -0.957979 -0.104324 -0.731780 1.782624 -0.873902 -0.910773 0.252506 -1.403241 1.668396 -0.957208 0 0
17 1.162343 -1.410603 1.107861 -1.278878 0.652589 -1.189878 -0.607570 2.124774 -1.773507 -1.635072 -1.485428 0.776204 1 0
18 -0.350570 -0.306746 0.672887 -0.541074 -0.262104 -0.270106 -0.268765 2.124774 -0.322479 0.633526 0.782517 0.912903 1 0
19 1.984589 -1.166218 -0.302121 -0.837423 1.444010 -0.977651 -0.769393 0.971765 -1.024957 0.065328 -0.751049 1.582401 1 0
20 -0.344429 0.397628 0.492928 0.088349 1.259038 -0.375019 0.861329 0.835025 0.519729 1.385370 0.302340 1.587608 1 0
21 -0.665123 -0.712314 -0.800151 -0.469685 0.600390 -0.337541 0.573835 0.311324 0.473167 1.385371 -0.000823 0.495499 1 0
22 -0.034726 0.631798 1.392275 0.218558 0.576512 -0.244433 1.034652 0.817298 0.794999 1.129188 -0.194205 1.762490 1 0
23 1.984589 -0.007008 1.098575 0.079477 0.064847 1.686726 0.117647 1.534220 -0.572812 0.064077 1.117174 0.106404 0 0
24 0.863870 -0.225984 0.529571 -0.208265 -0.709105 0.215012 -0.123712 2.124775 0.286024 0.404840 0.142872 0.545621 1 0
25 1.984589 -0.637932 0.518394 -0.066070 0.047258 1.915570 -0.370971 1.598931 -0.707575 -0.227660 0.935568 -0.410777 0 0
26 -0.534296 -0.293829 -0.533721 2.591888 -0.459134 0.157203 -0.637435 -0.966788 -1.041968 -1.536181 1.668394 -1.235485 0 0
27 -1.305187 -1.249514 -0.918547 0.558531 -0.810228 -0.527321 -0.867245 1.052965 1.429326 -0.486176 -1.156630 -1.447430 0 0
28 -0.702071 2.028691 -0.850117 0.070237 1.467057 -0.296061 1.830975 -0.993617 0.591063 0.197858 -0.843315 1.002836 1 0
29 0.432566 0.860284 -1.227202 0.670559 -1.159673 0.418544 -0.996449 -0.930246 0.373902 -1.168053 1.668396 -1.329085 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
279 -1.348839 0.139880 -1.253885 -1.275537 -0.280640 -1.141193 -0.892432 -1.344572 1.429327 0.195292 -1.485428 -1.377643 1 1
280 -0.179872 2.028690 -0.263275 -0.310922 1.069421 -0.450122 2.019519 -0.993600 -0.104343 0.791611 -0.318981 0.603525 1 1
281 0.469201 -0.567649 -0.709387 0.607882 -0.765402 -0.261128 -0.801727 0.179241 1.429328 -0.204750 0.800232 -0.581854 0 1
282 0.543320 -0.456381 -0.634102 0.325918 -0.664498 -0.167886 -0.687116 -0.079271 1.429328 -0.602810 1.035451 -0.583375 0 1
283 0.477794 -0.769583 -0.735746 1.473847 -0.762374 0.780975 -0.774212 0.728189 1.429327 -0.365422 1.597677 -0.544492 0 1
284 0.222631 -0.766506 0.927101 -0.670166 1.411449 -0.709842 0.247985 2.124775 -0.967118 0.016153 -0.867689 0.553112 1 1
285 1.911041 -1.385949 0.489432 -1.277915 0.744174 -1.189878 -0.732335 2.124775 -1.427934 0.826977 -0.913373 1.545683 1 1
286 0.998963 -0.323703 1.401654 -0.268532 1.829759 -0.704029 -0.260658 2.124775 -0.758056 0.093732 -0.465792 1.060927 1 1
287 -0.510470 -0.039911 1.963115 -0.642206 0.369415 -0.773032 -0.797277 -0.297193 -0.872803 1.385370 -0.488415 1.418495 1 1
288 -0.254650 0.178893 1.558633 -0.039151 0.587666 -0.615370 -0.017793 0.430361 0.049355 1.129887 -0.078385 1.762490 1 1
289 -0.808854 -0.341264 1.351164 -0.628500 0.452102 -0.907851 -0.256562 0.160761 -0.607798 0.915654 -0.519960 1.762490 1 1
290 1.984589 -0.481795 0.508510 -0.575951 -0.779282 0.435357 -0.690793 0.480685 -0.370618 0.452005 -0.553849 -0.500601 1 1
291 0.998287 -0.217204 0.367127 0.044444 -0.456589 0.906812 -0.366517 1.231917 0.553998 1.385370 0.680908 0.240103 0 1
292 0.798451 -0.538113 1.041133 -0.029389 -0.595522 0.670339 -0.544354 1.434857 -0.084621 1.385370 -0.330343 -0.433781 1 1
293 -0.943702 0.737028 -0.728275 1.208260 -0.076623 -0.869741 -0.566060 -0.607766 1.429327 -0.367929 -0.514004 -0.152881 0 1
294 -0.783173 2.028690 -0.694537 0.534761 0.700872 -0.657312 0.593459 -0.865193 0.804282 0.411026 -0.455890 -0.114253 1 1
295 -1.353539 0.236507 -1.253885 0.505804 2.532661 -1.137257 2.639114 -1.344572 -0.623826 -1.059108 -1.485217 -0.664024 1 1
296 -0.814494 -0.644175 -0.746555 -0.089905 1.458628 -0.329786 -0.180452 -0.280519 -0.556615 1.385371 -0.562439 0.215725 1 1
297 -1.328790 -0.867949 -1.235248 -1.143782 -0.824785 -1.189878 0.173069 -1.209548 -0.747695 0.835692 -1.224526 1.762490 1 1
298 -0.492926 0.191754 0.111709 -0.525002 1.181452 -0.578483 1.076554 0.005169 -0.283691 1.385370 -0.639443 0.647082 1 1
299 -0.170158 1.429318 0.352435 -0.389217 0.358405 0.174620 1.813232 0.463259 1.229264 1.385371 0.316814 0.934409 1 1
300 -0.735769 1.192104 0.781909 -0.385227 1.393252 -0.120730 0.406288 -0.281463 0.586122 1.385371 -0.245906 0.425382 1 1
301 -0.019554 1.160350 0.498593 -0.197460 2.311770 0.265997 2.684564 -0.481065 -0.409788 0.174802 -0.763379 0.571031 1 1
302 1.400364 -1.135263 -1.004589 -0.298465 -1.155209 -0.890738 -0.994393 0.530779 1.429327 -0.891882 0.281780 -1.060312 0 1
303 0.689113 -1.410603 -1.223892 2.775857 -1.269849 0.014549 -1.133868 1.868258 0.446129 -1.735168 -0.154717 -1.470599 0 1
304 0.922331 -1.397035 -0.464459 0.077595 -1.183685 -0.909669 -1.052482 0.619950 1.429327 -1.524369 -0.354957 -1.435964 0 1
305 0.750767 -0.421453 -0.860282 0.506761 -0.768372 0.918258 -0.755944 -0.644496 1.257549 -0.509585 1.668396 -0.681109 0 1
306 0.961272 -0.181417 -0.598543 1.120314 -0.794696 0.953980 -0.773921 -0.695204 1.429327 0.092550 1.404805 -0.528387 0 1
307 -0.525215 1.812541 -0.767815 -0.363920 1.274741 0.174788 0.980181 -0.048116 -0.124203 1.385370 -0.185668 0.000363 1 1
308 -1.312658 -1.394783 1.272388 -1.278878 -1.269849 2.667487 -1.096080 1.886802 -1.773507 1.153659 0.035789 -1.470599 1 1

309 rows × 14 columns

In [108]:
# Contingency of target vs. cluster as a stacked bar chart:
# x-axis = cluster id, bar segments = 'chosen' value (0/1), height = count.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
Out[108]:
<matplotlib.axes._subplots.AxesSubplot at 0x1ef62325320>
In [109]:
from IPython.display import display, Markdown, Latex
# Render the 4th company's name as a markdown section header
# (rendered output below shows "Hotel Marrakech").
display(Markdown('## '+companies[3]))

Hotel Marrakech

ANN

In [449]:
# Feature matrix for company index 3 — the standardized per-song frame.
# (Per X_train.shape below, it has 12 feature columns.)
X = df_n_ps_std_ch[3]
In [450]:
# Binary target: whether each song was chosen, taken from the
# non-standardized frame for the same company index.
y = df_n_ps[3]['chosen']
In [451]:
# Hold out a test partition (sklearn default: 25% test).
# Fix: the original call had no random_state, so the split — and every
# downstream accuracy number — changed on each kernel restart. A fixed seed
# (matching the np.random.seed(1234) used for the grid search) makes the
# analysis reproducible; all other behavior is unchanged.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234)
In [452]:
# Sanity check: training partition dimensions (rows, feature columns).
X_train.shape
Out[452]:
(139, 12)
In [114]:
# Base estimator for the grid search below: an MLP whose architecture,
# activation, learning rate and max_iter are overridden by the search grids.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [115]:
# Candidate hyperparameter grids for the MLP grid search below.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]

# Architectures grouped by depth: one, two and three hidden layers
# of 10-30 units each.
one_hidden = [(10,), (20,), (30,)]
two_hidden = [(10, 10), (20, 20), (30, 30), (20, 10)]
three_hidden = [(10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
hidden_layer_sizes_vec = one_hidden + two_hidden + three_hidden

# Initial learning rates: 0.001 through 0.01 in 0.001 steps, plus 0.02.
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005,
                          0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but not searched (see the commented-out entry in the param dict).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
In [116]:
import time
start = time.time()  # wall-clock reference: seconds since the Unix epoch (Jan 1, 1970)

np.random.seed(1234)

# Search space assembled from the candidate grids defined above.
# batch_size is deliberately excluded from the search.
parametros = {
    'activation': activation_vec,
    'max_iter': max_iter_vec,
    'hidden_layer_sizes': hidden_layer_sizes_vec,
    'learning_rate_init': learning_rate_init_vec,
}

# Track both accuracy and Cohen's kappa per fold; refit the final model on
# the accuracy winner.
# NOTE(review): iid=True is deprecated (removed in scikit-learn 0.24) —
# confirm the pinned sklearn version before re-running.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring,
                    refit='accuracy', n_jobs=-1, iid=True)
In [117]:
# Run the exhaustive search: 5-fold CV over every parameter combination.
grid.fit(X_train, y_train)

# Report the best hyperparameters with their mean CV accuracy and the
# Cohen's kappa of the same (best) parameter combination.
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time after the model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30, 20, 10), 'learning_rate_init': 0.005, 'max_iter': 400}, que permiten obtener un Accuracy de 74.82% y un Kappa del 36.52
Tiempo total: 20.79 minutos
In [453]:
# Dimensions for the keras re-implementation of the tuned MLP.
n0 = X_train.shape[1]  # input width = number of feature columns

# Pin the architecture to the grid-search winner, (30, 20, 10), so this cell
# gives the same network even if `grid` was refit with different data.
grid.best_params_['hidden_layer_sizes'] = [30, 20, 10]

### hidden_layer_sizes
# Layer widths: the hidden-layer sizes plus a single output unit.
# (list() replaces the original element-by-element copy loop — same result.)
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]

lr = 0.005    # best learning_rate_init found by the search
epochs = 400  # best max_iter found by the search
In [454]:
# Functional-API input layer sized to the feature count (n0 columns).
input_tensor = Input(shape = (n0,))
In [455]:
# Stack the hidden layers functionally: each Dense(tanh) layer consumes the
# previous layer's output, starting from the input tensor.
# NOTE(review): the grid search selected activation 'relu', but 'tanh' is
# used here — confirm this is intentional.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))

# Single sigmoid unit (ns[-1] == 1) for the binary 'chosen' classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
In [456]:
# Assemble the functional model and snapshot its freshly-initialized weights
# so the training cell can reset to the same starting point on re-runs
# (see model.set_weights(weights) below).
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [457]:
# Architecture overview: layer output shapes and parameter counts.
model.summary()
Model: "model_24"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_24 (InputLayer)        (None, 12)                0         
_________________________________________________________________
dense_75 (Dense)             (None, 30)                390       
_________________________________________________________________
dense_76 (Dense)             (None, 20)                620       
_________________________________________________________________
dense_77 (Dense)             (None, 10)                210       
_________________________________________________________________
dense_78 (Dense)             (None, 1)                 11        
=================================================================
Total params: 1,231
Trainable params: 1,231
Non-trainable params: 0
_________________________________________________________________
In [458]:
# Restore the initial weights so repeated runs of this cell start from the
# same point instead of continuing a previous fit.
model.set_weights(weights)
# lr=0.005 from the grid search; `lr` is the legacy keras argument name
# (newer keras spells it `learning_rate`).
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train for `epochs` epochs, halving the learning rate whenever validation
# accuracy fails to improve by 0.01 within 10 epochs.
# NOTE(review): ReduceLROnPlateau's mode is left to 'auto' and inferred from
# the monitor name — confirm it maximizes val_accuracy on this keras version.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test),
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 139 samples, validate on 47 samples
Epoch 1/400
139/139 [==============================] - 0s 2ms/step - loss: 0.6839 - accuracy: 0.5612 - val_loss: 0.6643 - val_accuracy: 0.6596
Epoch 2/400
139/139 [==============================] - 0s 101us/step - loss: 0.6139 - accuracy: 0.6978 - val_loss: 0.6457 - val_accuracy: 0.5745
Epoch 3/400
139/139 [==============================] - 0s 86us/step - loss: 0.5802 - accuracy: 0.7050 - val_loss: 0.6362 - val_accuracy: 0.6383
Epoch 4/400
139/139 [==============================] - 0s 79us/step - loss: 0.5585 - accuracy: 0.7050 - val_loss: 0.6262 - val_accuracy: 0.6809
Epoch 5/400
139/139 [==============================] - 0s 79us/step - loss: 0.5379 - accuracy: 0.7266 - val_loss: 0.6363 - val_accuracy: 0.7234
Epoch 6/400
139/139 [==============================] - 0s 86us/step - loss: 0.5218 - accuracy: 0.7554 - val_loss: 0.6389 - val_accuracy: 0.7021
Epoch 7/400
139/139 [==============================] - 0s 72us/step - loss: 0.5042 - accuracy: 0.7410 - val_loss: 0.6527 - val_accuracy: 0.6809
Epoch 8/400
139/139 [==============================] - 0s 86us/step - loss: 0.4917 - accuracy: 0.7770 - val_loss: 0.6756 - val_accuracy: 0.6809
Epoch 9/400
139/139 [==============================] - 0s 86us/step - loss: 0.4787 - accuracy: 0.7986 - val_loss: 0.6979 - val_accuracy: 0.6170
Epoch 10/400
139/139 [==============================] - 0s 79us/step - loss: 0.4664 - accuracy: 0.8201 - val_loss: 0.6851 - val_accuracy: 0.6170
Epoch 11/400
139/139 [==============================] - 0s 93us/step - loss: 0.4505 - accuracy: 0.8345 - val_loss: 0.6736 - val_accuracy: 0.6170
Epoch 12/400
139/139 [==============================] - 0s 101us/step - loss: 0.4432 - accuracy: 0.8273 - val_loss: 0.6798 - val_accuracy: 0.6809
Epoch 13/400
139/139 [==============================] - 0s 86us/step - loss: 0.4199 - accuracy: 0.8129 - val_loss: 0.6860 - val_accuracy: 0.6809
Epoch 14/400
139/139 [==============================] - 0s 79us/step - loss: 0.4032 - accuracy: 0.8273 - val_loss: 0.6889 - val_accuracy: 0.6170
Epoch 15/400
139/139 [==============================] - 0s 79us/step - loss: 0.3848 - accuracy: 0.8489 - val_loss: 0.6918 - val_accuracy: 0.6383

Epoch 00015: ReduceLROnPlateau reducing learning rate to 0.0024999999441206455.
Epoch 16/400
139/139 [==============================] - 0s 86us/step - loss: 0.3686 - accuracy: 0.8489 - val_loss: 0.7019 - val_accuracy: 0.6170
Epoch 17/400
139/139 [==============================] - 0s 72us/step - loss: 0.3604 - accuracy: 0.8489 - val_loss: 0.7117 - val_accuracy: 0.6170
Epoch 18/400
139/139 [==============================] - 0s 79us/step - loss: 0.3539 - accuracy: 0.8777 - val_loss: 0.7183 - val_accuracy: 0.6170
Epoch 19/400
139/139 [==============================] - 0s 79us/step - loss: 0.3441 - accuracy: 0.8777 - val_loss: 0.7249 - val_accuracy: 0.6170
Epoch 20/400
139/139 [==============================] - 0s 72us/step - loss: 0.3356 - accuracy: 0.8705 - val_loss: 0.7232 - val_accuracy: 0.6170
Epoch 21/400
139/139 [==============================] - 0s 79us/step - loss: 0.3259 - accuracy: 0.8561 - val_loss: 0.7256 - val_accuracy: 0.6383
Epoch 22/400
139/139 [==============================] - 0s 79us/step - loss: 0.3194 - accuracy: 0.8777 - val_loss: 0.7288 - val_accuracy: 0.6383
Epoch 23/400
139/139 [==============================] - 0s 72us/step - loss: 0.3080 - accuracy: 0.8777 - val_loss: 0.7311 - val_accuracy: 0.6383
Epoch 24/400
139/139 [==============================] - 0s 79us/step - loss: 0.2987 - accuracy: 0.8849 - val_loss: 0.7261 - val_accuracy: 0.6383
Epoch 25/400
139/139 [==============================] - 0s 72us/step - loss: 0.2921 - accuracy: 0.8993 - val_loss: 0.7177 - val_accuracy: 0.6170

Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.0012499999720603228.
Epoch 26/400
139/139 [==============================] - 0s 86us/step - loss: 0.2848 - accuracy: 0.8921 - val_loss: 0.7105 - val_accuracy: 0.6596
Epoch 27/400
139/139 [==============================] - 0s 129us/step - loss: 0.2794 - accuracy: 0.8921 - val_loss: 0.7090 - val_accuracy: 0.6809
Epoch 28/400
139/139 [==============================] - 0s 101us/step - loss: 0.2732 - accuracy: 0.8993 - val_loss: 0.7176 - val_accuracy: 0.6596
Epoch 29/400
139/139 [==============================] - 0s 79us/step - loss: 0.2676 - accuracy: 0.9065 - val_loss: 0.7270 - val_accuracy: 0.6596
Epoch 30/400
139/139 [==============================] - 0s 86us/step - loss: 0.2633 - accuracy: 0.9065 - val_loss: 0.7343 - val_accuracy: 0.6596
Epoch 31/400
139/139 [==============================] - 0s 79us/step - loss: 0.2585 - accuracy: 0.9065 - val_loss: 0.7410 - val_accuracy: 0.6383
Epoch 32/400
139/139 [==============================] - 0s 137us/step - loss: 0.2539 - accuracy: 0.8993 - val_loss: 0.7384 - val_accuracy: 0.6596
Epoch 33/400
139/139 [==============================] - 0s 93us/step - loss: 0.2484 - accuracy: 0.9065 - val_loss: 0.7357 - val_accuracy: 0.6596
Epoch 34/400
139/139 [==============================] - 0s 79us/step - loss: 0.2447 - accuracy: 0.9065 - val_loss: 0.7319 - val_accuracy: 0.6596
Epoch 35/400
139/139 [==============================] - 0s 79us/step - loss: 0.2404 - accuracy: 0.9137 - val_loss: 0.7283 - val_accuracy: 0.6596

Epoch 00035: ReduceLROnPlateau reducing learning rate to 0.0006249999860301614.
Epoch 36/400
139/139 [==============================] - 0s 86us/step - loss: 0.2355 - accuracy: 0.9137 - val_loss: 0.7289 - val_accuracy: 0.6596
Epoch 37/400
139/139 [==============================] - 0s 72us/step - loss: 0.2330 - accuracy: 0.9137 - val_loss: 0.7297 - val_accuracy: 0.6596
Epoch 38/400
139/139 [==============================] - 0s 86us/step - loss: 0.2305 - accuracy: 0.9209 - val_loss: 0.7305 - val_accuracy: 0.6596
Epoch 39/400
139/139 [==============================] - 0s 86us/step - loss: 0.2285 - accuracy: 0.9209 - val_loss: 0.7322 - val_accuracy: 0.6596
Epoch 40/400
139/139 [==============================] - 0s 72us/step - loss: 0.2262 - accuracy: 0.9281 - val_loss: 0.7341 - val_accuracy: 0.6383
Epoch 41/400
139/139 [==============================] - 0s 79us/step - loss: 0.2233 - accuracy: 0.9281 - val_loss: 0.7386 - val_accuracy: 0.6383
Epoch 42/400
139/139 [==============================] - 0s 79us/step - loss: 0.2221 - accuracy: 0.9209 - val_loss: 0.7464 - val_accuracy: 0.6596
Epoch 43/400
139/139 [==============================] - 0s 72us/step - loss: 0.2197 - accuracy: 0.9209 - val_loss: 0.7529 - val_accuracy: 0.6596
Epoch 44/400
139/139 [==============================] - 0s 86us/step - loss: 0.2173 - accuracy: 0.9209 - val_loss: 0.7492 - val_accuracy: 0.6383
Epoch 45/400
139/139 [==============================] - 0s 72us/step - loss: 0.2152 - accuracy: 0.9209 - val_loss: 0.7426 - val_accuracy: 0.6596

Epoch 00045: ReduceLROnPlateau reducing learning rate to 0.0003124999930150807.
Epoch 46/400
139/139 [==============================] - 0s 72us/step - loss: 0.2123 - accuracy: 0.9353 - val_loss: 0.7405 - val_accuracy: 0.6596
Epoch 47/400
139/139 [==============================] - 0s 72us/step - loss: 0.2114 - accuracy: 0.9353 - val_loss: 0.7371 - val_accuracy: 0.6596
Epoch 48/400
139/139 [==============================] - 0s 72us/step - loss: 0.2107 - accuracy: 0.9496 - val_loss: 0.7365 - val_accuracy: 0.6809
Epoch 49/400
139/139 [==============================] - 0s 72us/step - loss: 0.2092 - accuracy: 0.9496 - val_loss: 0.7379 - val_accuracy: 0.6809
Epoch 50/400
139/139 [==============================] - 0s 72us/step - loss: 0.2079 - accuracy: 0.9496 - val_loss: 0.7397 - val_accuracy: 0.6809
Epoch 51/400
139/139 [==============================] - 0s 72us/step - loss: 0.2068 - accuracy: 0.9496 - val_loss: 0.7412 - val_accuracy: 0.6596
Epoch 52/400
139/139 [==============================] - 0s 72us/step - loss: 0.2058 - accuracy: 0.9496 - val_loss: 0.7400 - val_accuracy: 0.6596
Epoch 53/400
139/139 [==============================] - 0s 79us/step - loss: 0.2046 - accuracy: 0.9496 - val_loss: 0.7398 - val_accuracy: 0.6596
Epoch 54/400
139/139 [==============================] - 0s 79us/step - loss: 0.2037 - accuracy: 0.9496 - val_loss: 0.7395 - val_accuracy: 0.6596
Epoch 55/400
139/139 [==============================] - 0s 86us/step - loss: 0.2026 - accuracy: 0.9496 - val_loss: 0.7387 - val_accuracy: 0.6596

Epoch 00055: ReduceLROnPlateau reducing learning rate to 0.00015624999650754035.
Epoch 56/400
139/139 [==============================] - 0s 108us/step - loss: 0.2014 - accuracy: 0.9496 - val_loss: 0.7387 - val_accuracy: 0.6596
Epoch 57/400
139/139 [==============================] - 0s 115us/step - loss: 0.2007 - accuracy: 0.9496 - val_loss: 0.7393 - val_accuracy: 0.6596
Epoch 58/400
139/139 [==============================] - 0s 101us/step - loss: 0.2001 - accuracy: 0.9496 - val_loss: 0.7398 - val_accuracy: 0.6596
Epoch 59/400
139/139 [==============================] - 0s 86us/step - loss: 0.1997 - accuracy: 0.9496 - val_loss: 0.7410 - val_accuracy: 0.6596
Epoch 60/400
139/139 [==============================] - 0s 86us/step - loss: 0.1991 - accuracy: 0.9496 - val_loss: 0.7421 - val_accuracy: 0.6596
Epoch 61/400
139/139 [==============================] - 0s 79us/step - loss: 0.1985 - accuracy: 0.9496 - val_loss: 0.7424 - val_accuracy: 0.6596
Epoch 62/400
139/139 [==============================] - 0s 86us/step - loss: 0.1980 - accuracy: 0.9496 - val_loss: 0.7435 - val_accuracy: 0.6596
Epoch 63/400
139/139 [==============================] - 0s 79us/step - loss: 0.1975 - accuracy: 0.9496 - val_loss: 0.7443 - val_accuracy: 0.6596
Epoch 64/400
139/139 [==============================] - 0s 86us/step - loss: 0.1969 - accuracy: 0.9496 - val_loss: 0.7453 - val_accuracy: 0.6596
Epoch 65/400
139/139 [==============================] - 0s 79us/step - loss: 0.1964 - accuracy: 0.9496 - val_loss: 0.7454 - val_accuracy: 0.6596

Epoch 00065: ReduceLROnPlateau reducing learning rate to 7.812499825377017e-05.
Epoch 66/400
139/139 [==============================] - 0s 79us/step - loss: 0.1959 - accuracy: 0.9496 - val_loss: 0.7459 - val_accuracy: 0.6383
Epoch 67/400
139/139 [==============================] - 0s 86us/step - loss: 0.1957 - accuracy: 0.9496 - val_loss: 0.7464 - val_accuracy: 0.6596
Epoch 68/400
139/139 [==============================] - 0s 79us/step - loss: 0.1953 - accuracy: 0.9496 - val_loss: 0.7463 - val_accuracy: 0.6383
Epoch 69/400
139/139 [==============================] - 0s 79us/step - loss: 0.1951 - accuracy: 0.9496 - val_loss: 0.7458 - val_accuracy: 0.6596
Epoch 70/400
139/139 [==============================] - 0s 79us/step - loss: 0.1948 - accuracy: 0.9496 - val_loss: 0.7459 - val_accuracy: 0.6596
Epoch 71/400
139/139 [==============================] - 0s 72us/step - loss: 0.1946 - accuracy: 0.9496 - val_loss: 0.7456 - val_accuracy: 0.6383
Epoch 72/400
139/139 [==============================] - 0s 72us/step - loss: 0.1943 - accuracy: 0.9496 - val_loss: 0.7457 - val_accuracy: 0.6383
Epoch 73/400
139/139 [==============================] - 0s 79us/step - loss: 0.1940 - accuracy: 0.9496 - val_loss: 0.7464 - val_accuracy: 0.6383
Epoch 74/400
139/139 [==============================] - 0s 72us/step - loss: 0.1937 - accuracy: 0.9496 - val_loss: 0.7473 - val_accuracy: 0.6596
Epoch 75/400
139/139 [==============================] - 0s 72us/step - loss: 0.1935 - accuracy: 0.9496 - val_loss: 0.7474 - val_accuracy: 0.6596

Epoch 00075: ReduceLROnPlateau reducing learning rate to 3.9062499126885086e-05.
Epoch 76/400
139/139 [==============================] - 0s 101us/step - loss: 0.1932 - accuracy: 0.9496 - val_loss: 0.7476 - val_accuracy: 0.6596
Epoch 77/400
139/139 [==============================] - 0s 101us/step - loss: 0.1932 - accuracy: 0.9496 - val_loss: 0.7481 - val_accuracy: 0.6596
Epoch 78/400
139/139 [==============================] - 0s 93us/step - loss: 0.1930 - accuracy: 0.9496 - val_loss: 0.7483 - val_accuracy: 0.6596
Epoch 79/400
139/139 [==============================] - 0s 86us/step - loss: 0.1928 - accuracy: 0.9496 - val_loss: 0.7483 - val_accuracy: 0.6596
Epoch 80/400
139/139 [==============================] - 0s 72us/step - loss: 0.1927 - accuracy: 0.9496 - val_loss: 0.7483 - val_accuracy: 0.6596
Epoch 81/400
139/139 [==============================] - 0s 72us/step - loss: 0.1926 - accuracy: 0.9496 - val_loss: 0.7483 - val_accuracy: 0.6596
Epoch 82/400
139/139 [==============================] - 0s 86us/step - loss: 0.1925 - accuracy: 0.9496 - val_loss: 0.7482 - val_accuracy: 0.6596
Epoch 83/400
139/139 [==============================] - 0s 86us/step - loss: 0.1923 - accuracy: 0.9496 - val_loss: 0.7482 - val_accuracy: 0.6596
Epoch 84/400
139/139 [==============================] - 0s 79us/step - loss: 0.1922 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 85/400
139/139 [==============================] - 0s 79us/step - loss: 0.1920 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596

Epoch 00085: ReduceLROnPlateau reducing learning rate to 1.9531249563442543e-05.
Epoch 86/400
139/139 [==============================] - 0s 72us/step - loss: 0.1919 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 87/400
139/139 [==============================] - 0s 72us/step - loss: 0.1919 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 88/400
139/139 [==============================] - 0s 72us/step - loss: 0.1918 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 89/400
139/139 [==============================] - 0s 115us/step - loss: 0.1917 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 90/400
139/139 [==============================] - 0s 108us/step - loss: 0.1916 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 91/400
139/139 [==============================] - 0s 86us/step - loss: 0.1916 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 92/400
139/139 [==============================] - 0s 86us/step - loss: 0.1915 - accuracy: 0.9496 - val_loss: 0.7488 - val_accuracy: 0.6596
Epoch 93/400
139/139 [==============================] - 0s 79us/step - loss: 0.1914 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 94/400
139/139 [==============================] - 0s 86us/step - loss: 0.1914 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 95/400
139/139 [==============================] - 0s 72us/step - loss: 0.1913 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596

Epoch 00095: ReduceLROnPlateau reducing learning rate to 9.765624781721272e-06.
Epoch 96/400
139/139 [==============================] - 0s 79us/step - loss: 0.1912 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 97/400
139/139 [==============================] - 0s 79us/step - loss: 0.1912 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 98/400
139/139 [==============================] - 0s 93us/step - loss: 0.1912 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 99/400
139/139 [==============================] - 0s 79us/step - loss: 0.1911 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 100/400
139/139 [==============================] - 0s 79us/step - loss: 0.1911 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 101/400
139/139 [==============================] - 0s 79us/step - loss: 0.1911 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 102/400
139/139 [==============================] - 0s 72us/step - loss: 0.1910 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 103/400
139/139 [==============================] - 0s 79us/step - loss: 0.1910 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 104/400
139/139 [==============================] - 0s 79us/step - loss: 0.1910 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 105/400
139/139 [==============================] - 0s 108us/step - loss: 0.1909 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00105: ReduceLROnPlateau reducing learning rate to 4.882812390860636e-06.
Epoch 106/400
139/139 [==============================] - 0s 79us/step - loss: 0.1909 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 107/400
139/139 [==============================] - 0s 72us/step - loss: 0.1909 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 108/400
139/139 [==============================] - 0s 79us/step - loss: 0.1909 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 109/400
139/139 [==============================] - 0s 79us/step - loss: 0.1908 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 110/400
139/139 [==============================] - 0s 79us/step - loss: 0.1908 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 111/400
139/139 [==============================] - 0s 72us/step - loss: 0.1908 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 112/400
139/139 [==============================] - 0s 72us/step - loss: 0.1908 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 113/400
139/139 [==============================] - 0s 72us/step - loss: 0.1908 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 114/400
139/139 [==============================] - 0s 72us/step - loss: 0.1908 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 115/400
139/139 [==============================] - 0s 72us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00115: ReduceLROnPlateau reducing learning rate to 2.441406195430318e-06.
Epoch 116/400
139/139 [==============================] - 0s 79us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7486 - val_accuracy: 0.6596
Epoch 117/400
139/139 [==============================] - 0s 72us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 118/400
139/139 [==============================] - 0s 72us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 119/400
139/139 [==============================] - 0s 72us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 120/400
139/139 [==============================] - 0s 72us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 121/400
139/139 [==============================] - 0s 115us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 122/400
139/139 [==============================] - 0s 93us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 123/400
139/139 [==============================] - 0s 79us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7488 - val_accuracy: 0.6596
Epoch 124/400
139/139 [==============================] - 0s 72us/step - loss: 0.1907 - accuracy: 0.9496 - val_loss: 0.7488 - val_accuracy: 0.6596
Epoch 125/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7488 - val_accuracy: 0.6596

Epoch 00125: ReduceLROnPlateau reducing learning rate to 1.220703097715159e-06.
Epoch 126/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7488 - val_accuracy: 0.6596
Epoch 127/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7488 - val_accuracy: 0.6596
Epoch 128/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 129/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 130/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 131/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 132/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 133/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 134/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 135/400
139/139 [==============================] - 0s 108us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00135: ReduceLROnPlateau reducing learning rate to 6.103515488575795e-07.
Epoch 136/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 137/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 138/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 139/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 140/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 141/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 142/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 143/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 144/400
139/139 [==============================] - 0s 65us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 145/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00145: ReduceLROnPlateau reducing learning rate to 3.0517577442878974e-07.
Epoch 146/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 147/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 148/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 149/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 150/400
139/139 [==============================] - 0s 65us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 151/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 152/400
139/139 [==============================] - 0s 122us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 153/400
139/139 [==============================] - 0s 122us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 154/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 155/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00155: ReduceLROnPlateau reducing learning rate to 1.5258788721439487e-07.
Epoch 156/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 157/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 158/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 159/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 160/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 161/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 162/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 163/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 164/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 165/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00165: ReduceLROnPlateau reducing learning rate to 7.629394360719743e-08.
Epoch 166/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 167/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 168/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 169/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 170/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 171/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 172/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 173/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 174/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 175/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00175: ReduceLROnPlateau reducing learning rate to 3.814697180359872e-08.
Epoch 176/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 177/400
139/139 [==============================] - 0s 122us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 178/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 179/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 180/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 181/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 182/400
139/139 [==============================] - 0s 129us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 183/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 184/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 185/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00185: ReduceLROnPlateau reducing learning rate to 1.907348590179936e-08.
Epoch 186/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 187/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 188/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 189/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 190/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 191/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 192/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 193/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 194/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 195/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00195: ReduceLROnPlateau reducing learning rate to 9.53674295089968e-09.
Epoch 196/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 197/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 198/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 199/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 200/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 201/400
139/139 [==============================] - 0s 108us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 202/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 203/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 204/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 205/400
139/139 [==============================] - 0s 137us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00205: ReduceLROnPlateau reducing learning rate to 4.76837147544984e-09.
Epoch 206/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 207/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 208/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 209/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 210/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 211/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 212/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 213/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 214/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 215/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00215: ReduceLROnPlateau reducing learning rate to 2.38418573772492e-09.
Epoch 216/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 217/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 218/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 219/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 220/400
139/139 [==============================] - 0s 65us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 221/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 222/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 223/400
139/139 [==============================] - 0s 115us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 224/400
139/139 [==============================] - 0s 108us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 225/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00225: ReduceLROnPlateau reducing learning rate to 1.19209286886246e-09.
Epoch 226/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 227/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 228/400
139/139 [==============================] - 0s 108us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 229/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 230/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 231/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 232/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 233/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 234/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 235/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00235: ReduceLROnPlateau reducing learning rate to 5.9604643443123e-10.
Epoch 236/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 237/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 238/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 239/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 240/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 241/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 242/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 243/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 244/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 245/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00245: ReduceLROnPlateau reducing learning rate to 2.98023217215615e-10.
Epoch 246/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 247/400
139/139 [==============================] - 0s 115us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 248/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 249/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 250/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 251/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 252/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 253/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 254/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 255/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00255: ReduceLROnPlateau reducing learning rate to 1.490116086078075e-10.
Epoch 256/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 257/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 258/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 259/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 260/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 261/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 262/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 263/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 264/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 265/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00265: ReduceLROnPlateau reducing learning rate to 7.450580430390374e-11.
Epoch 266/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 267/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 268/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 269/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 270/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 271/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 272/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 273/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 274/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 275/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00275: ReduceLROnPlateau reducing learning rate to 3.725290215195187e-11.
Epoch 276/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 277/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 278/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 279/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 280/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 281/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 282/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 283/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 284/400
139/139 [==============================] - 0s 65us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 285/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00285: ReduceLROnPlateau reducing learning rate to 1.8626451075975936e-11.
Epoch 286/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 287/400
139/139 [==============================] - 0s 94us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 288/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 289/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 290/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 291/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 292/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 293/400
139/139 [==============================] - 0s 115us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 294/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 295/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00295: ReduceLROnPlateau reducing learning rate to 9.313225537987968e-12.
Epoch 296/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 297/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 298/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 299/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 300/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 301/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 302/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 303/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 304/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 305/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00305: ReduceLROnPlateau reducing learning rate to 4.656612768993984e-12.
Epoch 306/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 307/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 308/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 309/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 310/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 311/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 312/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 313/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 314/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 315/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00315: ReduceLROnPlateau reducing learning rate to 2.328306384496992e-12.
Epoch 316/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 317/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 318/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 319/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 320/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 321/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 322/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 323/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 324/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 325/400
139/139 [==============================] - 0s 108us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00325: ReduceLROnPlateau reducing learning rate to 1.164153192248496e-12.
Epoch 326/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 327/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 328/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 329/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 330/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 331/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 332/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 333/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 334/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 335/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00335: ReduceLROnPlateau reducing learning rate to 5.82076596124248e-13.
Epoch 336/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 337/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 338/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 339/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 340/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 341/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 342/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 343/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 344/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 345/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00345: ReduceLROnPlateau reducing learning rate to 2.91038298062124e-13.
Epoch 346/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 347/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 348/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 349/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 350/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 351/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 352/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 353/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 354/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 355/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00355: ReduceLROnPlateau reducing learning rate to 1.45519149031062e-13.
Epoch 356/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 357/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 358/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 359/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 360/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 361/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 362/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 363/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 364/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 365/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00365: ReduceLROnPlateau reducing learning rate to 7.2759574515531e-14.
Epoch 366/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 367/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 368/400
139/139 [==============================] - 0s 65us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 369/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 370/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 371/400
139/139 [==============================] - 0s 115us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 372/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 373/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 374/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 375/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00375: ReduceLROnPlateau reducing learning rate to 3.63797872577655e-14.
Epoch 376/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 377/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 378/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 379/400
139/139 [==============================] - 0s 137us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 380/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 381/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 382/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 383/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 384/400
139/139 [==============================] - 0s 129us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 385/400
139/139 [==============================] - 0s 86us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00385: ReduceLROnPlateau reducing learning rate to 1.818989362888275e-14.
Epoch 386/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 387/400
139/139 [==============================] - 0s 93us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 388/400
139/139 [==============================] - 0s 115us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 389/400
139/139 [==============================] - 0s 101us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 390/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 391/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 392/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 393/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 394/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 395/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596

Epoch 00395: ReduceLROnPlateau reducing learning rate to 9.094946814441375e-15.
Epoch 396/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 397/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 398/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 399/400
139/139 [==============================] - 0s 72us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
Epoch 400/400
139/139 [==============================] - 0s 79us/step - loss: 0.1906 - accuracy: 0.9496 - val_loss: 0.7487 - val_accuracy: 0.6596
In [459]:
# Visualize the training history to diagnose over/underfitting:
# training vs. validation accuracy, then training vs. validation loss.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x-value per recorded epoch.
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 400)
In [460]:
# Evaluate the trained model on the held-out test set (loss + accuracy).
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
47/47 [==============================] - 0s 85us/step
test loss: 0.7487381544518978, test accuracy: 0.6595744490623474
In [461]:
# Predicted probabilities from the sigmoid output; ROC AUC is computed
# on the raw scores, no thresholding needed.
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
AUC ROC:  0.6145833333333334
In [462]:
# Binarize the predicted probabilities at the 0.5 threshold, then score
# agreement beyond chance with Cohen's kappa.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa:  0.15695067264573992

KMeans

In [128]:
X
Out[128]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12
0 -0.599304 -1.060396 0.465659 0.511787 1.705341 -0.372782 -0.636913 1.449092 0.180180 1.423244 -0.953744 -0.200195
1 -0.906389 1.573440 1.673148 0.108573 0.942015 -0.855402 0.537225 -0.893282 -1.001412 -0.464244 -1.251319 -0.454972
2 -0.149325 0.716481 -1.280822 -1.008531 -1.103704 -1.077175 -0.797686 0.153926 0.441860 -1.161482 -0.398909 1.830174
3 -0.569330 0.172306 -1.126575 -0.291817 -0.013676 -0.722243 -0.124954 -1.269880 -1.213759 -1.266152 -0.895866 1.830174
4 -1.256063 -0.200780 -1.280822 -0.770807 -1.163484 -1.044094 -0.860176 -1.494914 -1.397245 -1.430862 -0.986514 1.830173
5 1.498382 0.070778 1.241647 0.806722 0.329740 1.002855 -0.403863 -0.253624 -1.031682 -0.886917 1.207114 -0.113310
6 1.498381 -0.356738 0.435175 0.467927 1.384000 1.966674 0.658895 -0.657164 -0.969663 -0.907198 0.630731 -0.244667
7 -1.119602 2.289616 0.868130 -0.906024 1.400639 -0.225230 2.092267 -0.746990 0.637557 -0.699149 -0.755684 -0.485588
8 1.498382 0.306863 -0.233871 -0.413790 -1.099299 -0.508942 -0.858685 -0.496630 -1.256396 -1.344641 0.136594 -0.642192
9 -0.564178 0.625076 -0.818535 -0.891287 0.347738 -0.712095 0.252699 0.647252 0.527548 1.252301 0.765167 1.830174
10 -0.778882 1.298999 1.673148 1.062410 0.576150 0.140006 1.558658 -0.784362 -0.261035 -0.030554 -0.181206 0.766608
11 -0.644689 1.907389 0.724477 1.040904 2.334063 0.466749 2.569439 -0.430087 1.103736 -0.797487 -0.756174 -0.052327
12 -0.726607 0.659282 1.063080 -0.773481 0.736899 -0.854707 -0.181454 0.624941 1.157900 1.423244 0.545411 0.884626
13 0.038859 1.272019 -0.111205 -0.210854 -0.227700 -0.708715 1.758175 -0.238102 1.439851 0.256944 0.095708 -0.202718
14 -1.028647 1.835461 0.117175 -0.135897 1.231759 -0.367566 2.569439 -0.533125 1.077953 -0.124864 -0.328656 -0.256960
15 -1.170073 -1.108093 1.429803 -0.936214 0.176597 -1.037196 -0.638081 2.479776 -0.735019 -0.444317 -1.223897 -0.027713
16 0.568533 0.319544 0.717878 -1.026562 0.703422 -1.077175 0.341163 -1.256313 -1.126493 1.423244 -1.294186 -0.649362
17 0.818260 -0.108057 1.673148 0.213826 2.169898 -0.176880 -0.324327 0.079245 -0.325110 0.292708 -0.639169 1.253784
18 -1.346310 -1.140106 -1.280822 -1.026562 -0.843277 -0.873943 -0.950796 -1.073565 -0.253110 1.423244 -1.292263 -1.229049
19 1.250159 -1.050602 -1.222878 -0.928517 0.793165 0.458629 -0.589907 0.276552 -1.106710 1.423244 -1.096356 0.241921
20 0.572797 2.142771 1.581524 2.479349 0.473394 -0.093008 -0.084823 -1.267718 -1.284687 -1.432013 -1.251386 -0.638749
21 0.862369 2.221963 1.673148 -0.555264 -0.741332 -0.892991 0.221351 -0.116798 0.378493 1.217874 -1.085435 -0.256045
22 1.498382 -0.404560 0.195068 -0.943467 0.085181 -0.851680 -0.552763 1.808182 -0.441278 0.067722 -0.458537 1.662711
23 -1.364025 1.371234 1.673148 -0.497512 -0.157640 -0.979363 0.415864 -1.388894 -1.148743 1.407283 -1.247653 -1.119587
24 1.498382 -0.141656 1.241642 0.351671 -0.814862 -0.540976 -0.626294 -0.876743 -0.616984 -0.782913 -0.866230 0.810379
25 1.498382 -0.280382 1.532544 1.674414 -0.727241 -0.446776 -0.604067 0.307172 0.222362 -0.541751 0.720031 0.083232
26 1.075665 -0.107428 1.673148 2.096608 -0.226236 0.860102 0.163175 0.326457 0.203441 -0.281441 -0.236211 1.166053
27 0.540581 0.054204 0.505362 -0.931144 0.436468 -0.842085 0.277074 2.168789 -0.135488 0.863810 -0.521048 1.830173
28 -0.980545 -1.054168 -1.263653 -0.377459 -1.133370 -0.845439 0.110858 -1.019562 0.100180 -0.493484 0.325493 1.830174
29 -0.441960 1.036991 -0.878755 1.298082 -0.666326 0.845380 1.113303 -0.376141 1.439851 -0.664701 1.224927 1.272102
... ... ... ... ... ... ... ... ... ... ... ... ...
156 0.964566 -0.478675 0.600961 -1.000674 -0.743610 -0.499950 -0.696044 -0.174243 -0.686952 1.423244 -1.030797 -1.114829
157 0.270654 0.971091 0.132712 0.510175 0.258442 1.104875 0.136485 1.451829 1.439851 -0.382326 0.865175 -0.288952
158 0.388178 -0.776533 -1.121244 -0.867688 -0.827641 -0.776813 0.973880 -1.056551 -0.596814 1.082405 0.005738 1.830173
159 -0.409488 -0.518845 -0.902126 -0.775313 -0.624732 -0.869850 -0.153020 -0.306394 -0.007331 1.423244 0.011478 1.109037
160 -1.188703 -0.210659 -0.626345 -0.893545 -0.918629 -0.029765 2.569439 -1.381276 -1.323691 -0.262947 -1.017177 0.947571
161 -1.476495 -1.140106 -1.266686 -1.019916 -1.162501 0.086770 -0.874030 -0.598831 1.439851 -1.144185 -1.294186 -1.186653
162 -1.075963 -1.140106 0.768537 -1.026562 2.565083 1.775966 -0.950796 -1.453844 -1.349718 1.423244 -0.660846 -0.569048
163 -1.142157 -1.140106 -0.917613 -0.900746 2.578150 -1.071687 -0.937734 -1.420511 -1.345736 0.130051 -1.058466 -1.165382
164 -0.756762 -0.149815 1.673148 0.274589 0.009055 0.630144 -0.689021 1.604745 -1.053633 0.098821 0.463522 -0.747055
165 0.429405 -0.093236 1.263826 0.077597 0.114344 -0.750943 -0.299174 1.420985 -0.848138 1.423244 0.247779 0.052884
166 0.553018 -0.191462 1.673148 -0.349285 -0.196236 -0.932266 -0.717123 -0.672655 -1.120795 -0.026811 -0.537879 -0.807588
167 0.997841 1.111726 1.673148 0.509323 1.012646 -0.110920 1.607274 0.848930 0.336229 0.759005 0.460918 1.680913
168 1.281068 0.266174 0.446982 0.357357 0.348808 -0.209890 0.845374 1.311409 0.971748 1.423244 0.410051 1.807237
169 0.013523 -0.761110 -1.031584 -0.832016 0.088734 -0.963108 -0.407038 -1.026311 -0.384022 0.115756 -0.062208 1.830174
170 0.666873 -0.889641 1.409534 0.478248 0.640324 -0.028408 -0.137784 -1.195061 -1.343049 -1.104665 -0.345889 1.830173
171 -0.179155 -0.647105 1.673148 0.514384 0.469814 -0.052937 -0.435133 -1.056954 -1.372424 -1.219127 -0.571403 1.303219
172 -0.698104 -1.140106 1.673147 1.002618 -1.030539 0.062021 -0.950796 -0.862502 -1.410550 -1.459509 0.007704 -1.271527
173 -0.221070 -1.120261 0.098266 -1.026027 -1.172739 -0.397041 -0.950796 0.162128 0.351550 1.423244 0.924487 -1.024174
174 -0.987264 -1.140106 1.430551 0.148205 -1.172739 -0.974773 -0.950796 0.046605 0.349775 1.423244 1.009924 -1.168197
175 -0.001472 2.289616 -0.469230 -0.117523 0.087383 -1.038762 -0.386872 -1.069931 0.726815 1.374128 0.589319 0.225042
176 0.759267 2.289616 -0.807487 -0.605891 -0.207466 -0.122817 -0.928360 -1.465473 -0.403582 0.164132 0.091145 1.237470
177 0.303330 1.071917 0.410221 0.816208 0.386825 -0.293228 -0.059633 0.466962 1.121183 1.357331 1.812099 1.148304
178 0.371415 0.428878 -0.437216 0.367689 -0.713660 1.561886 -0.345821 -0.608625 0.845040 0.018040 1.812099 -0.795570
179 -1.106214 0.339351 -0.739020 -0.842920 -0.830001 -0.625489 0.083218 1.521028 1.439851 0.093070 1.538478 -0.976780
180 0.970965 1.730559 1.300739 1.716251 2.578151 0.794509 -0.890606 -1.133358 -1.410550 -1.452853 -1.269329 -1.265541
181 -1.092848 -1.140019 -1.014068 -0.937922 2.397303 0.274619 0.231215 0.010249 -0.572052 0.204326 -1.044505 1.830173
182 -0.432669 -1.140106 -1.275255 -0.740960 2.578150 -1.060066 1.058297 -0.120367 -1.278125 -1.425308 -1.294186 0.144247
183 -1.222408 -1.109984 0.115563 -1.017839 -0.845053 -0.886348 0.708798 1.447822 0.072199 1.423244 -0.469073 0.751450
184 1.498382 0.114899 0.573155 -0.852483 -0.059147 1.559801 -0.088275 0.489614 0.106407 1.089262 -0.583791 0.017900
185 -0.252632 -0.228432 -0.156977 2.073566 1.013665 2.254737 -0.135625 -0.232305 -0.538284 -0.820780 0.474282 -0.303243

186 rows × 12 columns

In [129]:
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14.
# KMeans.fit returns the fitted estimator, so the whole sweep is one comprehension.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[129]:
[2232.0,
 1887.86126663073,
 1710.7260050049272,
 1558.3692212048186,
 1444.9563111647813,
 1360.8980768251936,
 1285.0938986929364,
 1227.1073156803282,
 1175.9593811616949,
 1123.763719209633,
 1080.6180773747733,
 1058.1006766601422,
 1021.9523094285516,
 986.031006954783]
In [130]:
# Elbow plot: look for the "knee" where adding clusters stops paying off.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
plt.title('Elbow method')
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares (inertia)')
Out[130]:
[<matplotlib.lines.Line2D at 0x1ef63d7d128>]

K=2

In [131]:
# Final K-Means with k=2, chosen from the elbow plot above.
kmeans_ch = KMeans(n_clusters=2, random_state=0, n_init=10)
kmeans_ch.fit(X)
Out[131]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [132]:
kmeans_ch.labels_
Out[132]:
array([1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1,
       1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1,
       1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0,
       1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1,
       0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0,
       1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1,
       1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0,
       0, 0, 0, 0, 0, 1, 1, 1, 0, 0])
In [133]:
# predict() on the same data used for fitting yields the same assignments
# as labels_ above (compare Out[132] and Out[133]).
clusters_ch = kmeans_ch.predict(X)
clusters_ch
Out[133]:
array([1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1,
       1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1,
       1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0,
       1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1,
       0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0,
       1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1,
       1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0,
       0, 0, 0, 0, 0, 1, 1, 1, 0, 0])
In [134]:
# Attach cluster assignment and ground-truth label for the comparison plot below.
# NOTE(review): this mutates the feature matrix X in place; re-running the
# clustering cells above after this point would cluster on these added
# columns too — a fresh name (e.g. X_labeled) would be safer.
X.loc[:,'Cluster'] = clusters_ch
X.loc[:,'chosen'] = list(y)
In [135]:
X
Out[135]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12 Cluster chosen
0 -0.599304 -1.060396 0.465659 0.511787 1.705341 -0.372782 -0.636913 1.449092 0.180180 1.423244 -0.953744 -0.200195 1 0
1 -0.906389 1.573440 1.673148 0.108573 0.942015 -0.855402 0.537225 -0.893282 -1.001412 -0.464244 -1.251319 -0.454972 1 0
2 -0.149325 0.716481 -1.280822 -1.008531 -1.103704 -1.077175 -0.797686 0.153926 0.441860 -1.161482 -0.398909 1.830174 1 0
3 -0.569330 0.172306 -1.126575 -0.291817 -0.013676 -0.722243 -0.124954 -1.269880 -1.213759 -1.266152 -0.895866 1.830174 1 0
4 -1.256063 -0.200780 -1.280822 -0.770807 -1.163484 -1.044094 -0.860176 -1.494914 -1.397245 -1.430862 -0.986514 1.830173 1 0
5 1.498382 0.070778 1.241647 0.806722 0.329740 1.002855 -0.403863 -0.253624 -1.031682 -0.886917 1.207114 -0.113310 0 0
6 1.498381 -0.356738 0.435175 0.467927 1.384000 1.966674 0.658895 -0.657164 -0.969663 -0.907198 0.630731 -0.244667 0 0
7 -1.119602 2.289616 0.868130 -0.906024 1.400639 -0.225230 2.092267 -0.746990 0.637557 -0.699149 -0.755684 -0.485588 0 0
8 1.498382 0.306863 -0.233871 -0.413790 -1.099299 -0.508942 -0.858685 -0.496630 -1.256396 -1.344641 0.136594 -0.642192 1 0
9 -0.564178 0.625076 -0.818535 -0.891287 0.347738 -0.712095 0.252699 0.647252 0.527548 1.252301 0.765167 1.830174 1 0
10 -0.778882 1.298999 1.673148 1.062410 0.576150 0.140006 1.558658 -0.784362 -0.261035 -0.030554 -0.181206 0.766608 0 0
11 -0.644689 1.907389 0.724477 1.040904 2.334063 0.466749 2.569439 -0.430087 1.103736 -0.797487 -0.756174 -0.052327 0 0
12 -0.726607 0.659282 1.063080 -0.773481 0.736899 -0.854707 -0.181454 0.624941 1.157900 1.423244 0.545411 0.884626 1 0
13 0.038859 1.272019 -0.111205 -0.210854 -0.227700 -0.708715 1.758175 -0.238102 1.439851 0.256944 0.095708 -0.202718 0 0
14 -1.028647 1.835461 0.117175 -0.135897 1.231759 -0.367566 2.569439 -0.533125 1.077953 -0.124864 -0.328656 -0.256960 0 0
15 -1.170073 -1.108093 1.429803 -0.936214 0.176597 -1.037196 -0.638081 2.479776 -0.735019 -0.444317 -1.223897 -0.027713 1 0
16 0.568533 0.319544 0.717878 -1.026562 0.703422 -1.077175 0.341163 -1.256313 -1.126493 1.423244 -1.294186 -0.649362 1 0
17 0.818260 -0.108057 1.673148 0.213826 2.169898 -0.176880 -0.324327 0.079245 -0.325110 0.292708 -0.639169 1.253784 1 0
18 -1.346310 -1.140106 -1.280822 -1.026562 -0.843277 -0.873943 -0.950796 -1.073565 -0.253110 1.423244 -1.292263 -1.229049 1 0
19 1.250159 -1.050602 -1.222878 -0.928517 0.793165 0.458629 -0.589907 0.276552 -1.106710 1.423244 -1.096356 0.241921 1 0
20 0.572797 2.142771 1.581524 2.479349 0.473394 -0.093008 -0.084823 -1.267718 -1.284687 -1.432013 -1.251386 -0.638749 0 0
21 0.862369 2.221963 1.673148 -0.555264 -0.741332 -0.892991 0.221351 -0.116798 0.378493 1.217874 -1.085435 -0.256045 1 0
22 1.498382 -0.404560 0.195068 -0.943467 0.085181 -0.851680 -0.552763 1.808182 -0.441278 0.067722 -0.458537 1.662711 1 0
23 -1.364025 1.371234 1.673148 -0.497512 -0.157640 -0.979363 0.415864 -1.388894 -1.148743 1.407283 -1.247653 -1.119587 1 0
24 1.498382 -0.141656 1.241642 0.351671 -0.814862 -0.540976 -0.626294 -0.876743 -0.616984 -0.782913 -0.866230 0.810379 1 0
25 1.498382 -0.280382 1.532544 1.674414 -0.727241 -0.446776 -0.604067 0.307172 0.222362 -0.541751 0.720031 0.083232 0 0
26 1.075665 -0.107428 1.673148 2.096608 -0.226236 0.860102 0.163175 0.326457 0.203441 -0.281441 -0.236211 1.166053 0 0
27 0.540581 0.054204 0.505362 -0.931144 0.436468 -0.842085 0.277074 2.168789 -0.135488 0.863810 -0.521048 1.830173 1 0
28 -0.980545 -1.054168 -1.263653 -0.377459 -1.133370 -0.845439 0.110858 -1.019562 0.100180 -0.493484 0.325493 1.830174 1 0
29 -0.441960 1.036991 -0.878755 1.298082 -0.666326 0.845380 1.113303 -0.376141 1.439851 -0.664701 1.224927 1.272102 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
156 0.964566 -0.478675 0.600961 -1.000674 -0.743610 -0.499950 -0.696044 -0.174243 -0.686952 1.423244 -1.030797 -1.114829 1 1
157 0.270654 0.971091 0.132712 0.510175 0.258442 1.104875 0.136485 1.451829 1.439851 -0.382326 0.865175 -0.288952 0 1
158 0.388178 -0.776533 -1.121244 -0.867688 -0.827641 -0.776813 0.973880 -1.056551 -0.596814 1.082405 0.005738 1.830173 1 1
159 -0.409488 -0.518845 -0.902126 -0.775313 -0.624732 -0.869850 -0.153020 -0.306394 -0.007331 1.423244 0.011478 1.109037 1 1
160 -1.188703 -0.210659 -0.626345 -0.893545 -0.918629 -0.029765 2.569439 -1.381276 -1.323691 -0.262947 -1.017177 0.947571 1 1
161 -1.476495 -1.140106 -1.266686 -1.019916 -1.162501 0.086770 -0.874030 -0.598831 1.439851 -1.144185 -1.294186 -1.186653 1 1
162 -1.075963 -1.140106 0.768537 -1.026562 2.565083 1.775966 -0.950796 -1.453844 -1.349718 1.423244 -0.660846 -0.569048 1 1
163 -1.142157 -1.140106 -0.917613 -0.900746 2.578150 -1.071687 -0.937734 -1.420511 -1.345736 0.130051 -1.058466 -1.165382 1 1
164 -0.756762 -0.149815 1.673148 0.274589 0.009055 0.630144 -0.689021 1.604745 -1.053633 0.098821 0.463522 -0.747055 1 1
165 0.429405 -0.093236 1.263826 0.077597 0.114344 -0.750943 -0.299174 1.420985 -0.848138 1.423244 0.247779 0.052884 1 1
166 0.553018 -0.191462 1.673148 -0.349285 -0.196236 -0.932266 -0.717123 -0.672655 -1.120795 -0.026811 -0.537879 -0.807588 1 1
167 0.997841 1.111726 1.673148 0.509323 1.012646 -0.110920 1.607274 0.848930 0.336229 0.759005 0.460918 1.680913 0 1
168 1.281068 0.266174 0.446982 0.357357 0.348808 -0.209890 0.845374 1.311409 0.971748 1.423244 0.410051 1.807237 0 1
169 0.013523 -0.761110 -1.031584 -0.832016 0.088734 -0.963108 -0.407038 -1.026311 -0.384022 0.115756 -0.062208 1.830174 1 1
170 0.666873 -0.889641 1.409534 0.478248 0.640324 -0.028408 -0.137784 -1.195061 -1.343049 -1.104665 -0.345889 1.830173 1 1
171 -0.179155 -0.647105 1.673148 0.514384 0.469814 -0.052937 -0.435133 -1.056954 -1.372424 -1.219127 -0.571403 1.303219 1 1
172 -0.698104 -1.140106 1.673147 1.002618 -1.030539 0.062021 -0.950796 -0.862502 -1.410550 -1.459509 0.007704 -1.271527 1 1
173 -0.221070 -1.120261 0.098266 -1.026027 -1.172739 -0.397041 -0.950796 0.162128 0.351550 1.423244 0.924487 -1.024174 1 1
174 -0.987264 -1.140106 1.430551 0.148205 -1.172739 -0.974773 -0.950796 0.046605 0.349775 1.423244 1.009924 -1.168197 1 1
175 -0.001472 2.289616 -0.469230 -0.117523 0.087383 -1.038762 -0.386872 -1.069931 0.726815 1.374128 0.589319 0.225042 0 1
176 0.759267 2.289616 -0.807487 -0.605891 -0.207466 -0.122817 -0.928360 -1.465473 -0.403582 0.164132 0.091145 1.237470 0 1
177 0.303330 1.071917 0.410221 0.816208 0.386825 -0.293228 -0.059633 0.466962 1.121183 1.357331 1.812099 1.148304 0 1
178 0.371415 0.428878 -0.437216 0.367689 -0.713660 1.561886 -0.345821 -0.608625 0.845040 0.018040 1.812099 -0.795570 0 1
179 -1.106214 0.339351 -0.739020 -0.842920 -0.830001 -0.625489 0.083218 1.521028 1.439851 0.093070 1.538478 -0.976780 0 1
180 0.970965 1.730559 1.300739 1.716251 2.578151 0.794509 -0.890606 -1.133358 -1.410550 -1.452853 -1.269329 -1.265541 0 1
181 -1.092848 -1.140019 -1.014068 -0.937922 2.397303 0.274619 0.231215 0.010249 -0.572052 0.204326 -1.044505 1.830173 1 1
182 -0.432669 -1.140106 -1.275255 -0.740960 2.578150 -1.060066 1.058297 -0.120367 -1.278125 -1.425308 -1.294186 0.144247 1 1
183 -1.222408 -1.109984 0.115563 -1.017839 -0.845053 -0.886348 0.708798 1.447822 0.072199 1.423244 -0.469073 0.751450 1 1
184 1.498382 0.114899 0.573155 -0.852483 -0.059147 1.559801 -0.088275 0.489614 0.106407 1.089262 -0.583791 0.017900 0 1
185 -0.252632 -0.228432 -0.156977 2.073566 1.013665 2.254737 -0.135625 -0.232305 -0.538284 -0.820780 0.474282 -0.303243 0 1

186 rows × 14 columns

In [136]:
# Cluster composition vs. the 'chosen' label: count rows per (chosen, cluster)
# pair and draw one stacked bar per cluster.
# Naming the size column explicitly ('count') avoids relying on the anonymous
# default column label 0, and renaming the frame avoids confusion with the
# `stacked=True` plot keyword.
counts = X.groupby(['chosen', 'Cluster']).size().reset_index(name='count')
pivot_df = counts.pivot(index='Cluster', columns='chosen', values='count')
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[136]:
<matplotlib.axes._subplots.AxesSubplot at 0x1ef63d8d7f0>
In [137]:
# Render a markdown section header for the current company (index 4).
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[4]))

Specialized

ANN

In [463]:
X = df_n_ps_std_ch[4]
In [464]:
y = df_n_ps[4]['chosen']
In [465]:
X_train, X_test, y_train, y_test = train_test_split(X, y)
In [466]:
X_train.shape
Out[466]:
(164, 12)
In [142]:
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [143]:
# Candidate hyperparameter values for the grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]  # defined but excluded from the grid below
In [144]:
import time
start = time.time() # current time in seconds since Jan 1, 1970 (epoch reference point)

np.random.seed(1234)
# Parameter grid; batch_size was left out (commented) to keep the search tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track both kappa and accuracy during CV; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): iid=True was deprecated in scikit-learn 0.22 and removed in
# 0.24 — drop the argument when upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [145]:
grid.fit(X_train, y_train)

# Report the best configuration (message text is in Spanish: best params,
# accuracy % and kappa % of the winning model).
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # time after model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20, 20), 'learning_rate_init': 0.001, 'max_iter': 500}, que permiten obtener un Accuracy de 64.63% y un Kappa del 27.16
Tiempo total: 22.23 minutos
In [467]:
# Derive the Keras network architecture from the grid-search result.
n0 = X_train.shape[1]
# Override with the best layer sizes found by the search above.
grid.best_params_['hidden_layer_sizes'] = [20, 20]
# Hidden-layer widths, plus a single output unit appended at the end.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = 0.001
epochs = 500
In [468]:
input_tensor = Input(shape = (n0,))
In [469]:
# Chain one tanh Dense layer per hidden width in `ns`, each consuming the
# previous layer's output, then cap with a sigmoid output unit.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))

classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
In [470]:
model = Model([input_tensor], [classification_output])
# Snapshot the freshly initialized weights so training can be restarted
# from the same starting point (restored before fit below).
weights = model.get_weights()
In [471]:
model.summary()
Model: "model_25"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_25 (InputLayer)        (None, 12)                0         
_________________________________________________________________
dense_79 (Dense)             (None, 20)                260       
_________________________________________________________________
dense_80 (Dense)             (None, 20)                420       
_________________________________________________________________
dense_81 (Dense)             (None, 1)                 21        
=================================================================
Total params: 701
Trainable params: 701
Non-trainable params: 0
_________________________________________________________________
In [472]:
# Restore the saved initial weights so training starts from a known state.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy Keras argument name; newer versions use
# `learning_rate` (logs below show this is an older Keras, where `lr` works).
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy plateaus for 10 epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 164 samples, validate on 55 samples
Epoch 1/500
164/164 [==============================] - 0s 1ms/step - loss: 0.7345 - accuracy: 0.5488 - val_loss: 0.6907 - val_accuracy: 0.5455
Epoch 2/500
164/164 [==============================] - 0s 110us/step - loss: 0.7152 - accuracy: 0.5732 - val_loss: 0.6766 - val_accuracy: 0.5636
Epoch 3/500
164/164 [==============================] - 0s 79us/step - loss: 0.7022 - accuracy: 0.5610 - val_loss: 0.6723 - val_accuracy: 0.5636
Epoch 4/500
164/164 [==============================] - 0s 73us/step - loss: 0.6954 - accuracy: 0.5549 - val_loss: 0.6718 - val_accuracy: 0.5818
Epoch 5/500
164/164 [==============================] - 0s 85us/step - loss: 0.6887 - accuracy: 0.5549 - val_loss: 0.6672 - val_accuracy: 0.5818
Epoch 6/500
164/164 [==============================] - 0s 79us/step - loss: 0.6837 - accuracy: 0.5671 - val_loss: 0.6599 - val_accuracy: 0.6000
Epoch 7/500
164/164 [==============================] - 0s 79us/step - loss: 0.6786 - accuracy: 0.5732 - val_loss: 0.6541 - val_accuracy: 0.6000
Epoch 8/500
164/164 [==============================] - 0s 104us/step - loss: 0.6748 - accuracy: 0.5671 - val_loss: 0.6502 - val_accuracy: 0.6000
Epoch 9/500
164/164 [==============================] - 0s 104us/step - loss: 0.6722 - accuracy: 0.5610 - val_loss: 0.6461 - val_accuracy: 0.5818
Epoch 10/500
164/164 [==============================] - 0s 79us/step - loss: 0.6679 - accuracy: 0.5549 - val_loss: 0.6429 - val_accuracy: 0.6182
Epoch 11/500
164/164 [==============================] - 0s 67us/step - loss: 0.6651 - accuracy: 0.5610 - val_loss: 0.6387 - val_accuracy: 0.6364
Epoch 12/500
164/164 [==============================] - 0s 73us/step - loss: 0.6624 - accuracy: 0.5610 - val_loss: 0.6376 - val_accuracy: 0.6364
Epoch 13/500
164/164 [==============================] - 0s 73us/step - loss: 0.6591 - accuracy: 0.5732 - val_loss: 0.6371 - val_accuracy: 0.6364
Epoch 14/500
164/164 [==============================] - 0s 67us/step - loss: 0.6575 - accuracy: 0.5854 - val_loss: 0.6359 - val_accuracy: 0.6364
Epoch 15/500
164/164 [==============================] - 0s 73us/step - loss: 0.6553 - accuracy: 0.5976 - val_loss: 0.6344 - val_accuracy: 0.6545
Epoch 16/500
164/164 [==============================] - 0s 67us/step - loss: 0.6536 - accuracy: 0.5976 - val_loss: 0.6311 - val_accuracy: 0.6909
Epoch 17/500
164/164 [==============================] - 0s 67us/step - loss: 0.6519 - accuracy: 0.5915 - val_loss: 0.6286 - val_accuracy: 0.6727
Epoch 18/500
164/164 [==============================] - 0s 67us/step - loss: 0.6507 - accuracy: 0.6037 - val_loss: 0.6254 - val_accuracy: 0.6727
Epoch 19/500
164/164 [==============================] - 0s 85us/step - loss: 0.6491 - accuracy: 0.5976 - val_loss: 0.6217 - val_accuracy: 0.6909
Epoch 20/500
164/164 [==============================] - 0s 73us/step - loss: 0.6474 - accuracy: 0.5976 - val_loss: 0.6230 - val_accuracy: 0.6727
Epoch 21/500
164/164 [==============================] - 0s 73us/step - loss: 0.6458 - accuracy: 0.6098 - val_loss: 0.6213 - val_accuracy: 0.6727
Epoch 22/500
164/164 [==============================] - 0s 73us/step - loss: 0.6445 - accuracy: 0.6098 - val_loss: 0.6206 - val_accuracy: 0.6909
Epoch 23/500
164/164 [==============================] - 0s 79us/step - loss: 0.6431 - accuracy: 0.6098 - val_loss: 0.6210 - val_accuracy: 0.6909
Epoch 24/500
164/164 [==============================] - 0s 67us/step - loss: 0.6418 - accuracy: 0.6159 - val_loss: 0.6209 - val_accuracy: 0.6909
Epoch 25/500
164/164 [==============================] - 0s 67us/step - loss: 0.6406 - accuracy: 0.6159 - val_loss: 0.6205 - val_accuracy: 0.6909
Epoch 26/500
164/164 [==============================] - 0s 85us/step - loss: 0.6394 - accuracy: 0.6159 - val_loss: 0.6213 - val_accuracy: 0.6727

Epoch 00026: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 27/500
164/164 [==============================] - 0s 79us/step - loss: 0.6382 - accuracy: 0.6098 - val_loss: 0.6231 - val_accuracy: 0.6909
Epoch 28/500
164/164 [==============================] - 0s 67us/step - loss: 0.6375 - accuracy: 0.6098 - val_loss: 0.6234 - val_accuracy: 0.6909
Epoch 29/500
164/164 [==============================] - 0s 61us/step - loss: 0.6367 - accuracy: 0.6098 - val_loss: 0.6217 - val_accuracy: 0.6727
Epoch 30/500
164/164 [==============================] - 0s 73us/step - loss: 0.6359 - accuracy: 0.6159 - val_loss: 0.6199 - val_accuracy: 0.6727
Epoch 31/500
164/164 [==============================] - 0s 67us/step - loss: 0.6352 - accuracy: 0.6159 - val_loss: 0.6197 - val_accuracy: 0.6727
Epoch 32/500
164/164 [==============================] - 0s 85us/step - loss: 0.6344 - accuracy: 0.6159 - val_loss: 0.6200 - val_accuracy: 0.6727
Epoch 33/500
164/164 [==============================] - 0s 73us/step - loss: 0.6334 - accuracy: 0.6220 - val_loss: 0.6209 - val_accuracy: 0.6545
Epoch 34/500
164/164 [==============================] - 0s 67us/step - loss: 0.6325 - accuracy: 0.6220 - val_loss: 0.6205 - val_accuracy: 0.6727
Epoch 35/500
164/164 [==============================] - 0s 79us/step - loss: 0.6318 - accuracy: 0.6159 - val_loss: 0.6181 - val_accuracy: 0.6727
Epoch 36/500
164/164 [==============================] - 0s 158us/step - loss: 0.6310 - accuracy: 0.6098 - val_loss: 0.6161 - val_accuracy: 0.6909

Epoch 00036: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 37/500
164/164 [==============================] - 0s 91us/step - loss: 0.6306 - accuracy: 0.6220 - val_loss: 0.6150 - val_accuracy: 0.6909
Epoch 38/500
164/164 [==============================] - 0s 91us/step - loss: 0.6300 - accuracy: 0.6220 - val_loss: 0.6148 - val_accuracy: 0.6909
Epoch 39/500
164/164 [==============================] - 0s 73us/step - loss: 0.6298 - accuracy: 0.6220 - val_loss: 0.6149 - val_accuracy: 0.6909
Epoch 40/500
164/164 [==============================] - 0s 67us/step - loss: 0.6294 - accuracy: 0.6220 - val_loss: 0.6148 - val_accuracy: 0.6909
Epoch 41/500
164/164 [==============================] - 0s 67us/step - loss: 0.6289 - accuracy: 0.6220 - val_loss: 0.6157 - val_accuracy: 0.6727
Epoch 42/500
164/164 [==============================] - 0s 67us/step - loss: 0.6285 - accuracy: 0.6159 - val_loss: 0.6164 - val_accuracy: 0.6727
Epoch 43/500
164/164 [==============================] - 0s 67us/step - loss: 0.6281 - accuracy: 0.6098 - val_loss: 0.6173 - val_accuracy: 0.6727
Epoch 44/500
164/164 [==============================] - 0s 67us/step - loss: 0.6277 - accuracy: 0.6098 - val_loss: 0.6177 - val_accuracy: 0.6727
Epoch 45/500
164/164 [==============================] - 0s 79us/step - loss: 0.6275 - accuracy: 0.6037 - val_loss: 0.6184 - val_accuracy: 0.6727
Epoch 46/500
164/164 [==============================] - 0s 73us/step - loss: 0.6271 - accuracy: 0.6037 - val_loss: 0.6191 - val_accuracy: 0.6727

Epoch 00046: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 47/500
164/164 [==============================] - 0s 73us/step - loss: 0.6268 - accuracy: 0.6037 - val_loss: 0.6190 - val_accuracy: 0.6727
Epoch 48/500
164/164 [==============================] - 0s 67us/step - loss: 0.6267 - accuracy: 0.6098 - val_loss: 0.6184 - val_accuracy: 0.6727
Epoch 49/500
164/164 [==============================] - 0s 73us/step - loss: 0.6265 - accuracy: 0.6159 - val_loss: 0.6185 - val_accuracy: 0.6727
Epoch 50/500
164/164 [==============================] - 0s 67us/step - loss: 0.6264 - accuracy: 0.6159 - val_loss: 0.6182 - val_accuracy: 0.6727
Epoch 51/500
164/164 [==============================] - 0s 67us/step - loss: 0.6262 - accuracy: 0.6159 - val_loss: 0.6180 - val_accuracy: 0.6727
Epoch 52/500
164/164 [==============================] - 0s 73us/step - loss: 0.6259 - accuracy: 0.6159 - val_loss: 0.6182 - val_accuracy: 0.6727
Epoch 53/500
164/164 [==============================] - 0s 67us/step - loss: 0.6257 - accuracy: 0.6159 - val_loss: 0.6186 - val_accuracy: 0.6727
Epoch 54/500
164/164 [==============================] - 0s 67us/step - loss: 0.6256 - accuracy: 0.6159 - val_loss: 0.6191 - val_accuracy: 0.6727
Epoch 55/500
164/164 [==============================] - 0s 61us/step - loss: 0.6253 - accuracy: 0.6159 - val_loss: 0.6192 - val_accuracy: 0.6727
Epoch 56/500
164/164 [==============================] - 0s 61us/step - loss: 0.6251 - accuracy: 0.6159 - val_loss: 0.6195 - val_accuracy: 0.6364

Epoch 00056: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 57/500
164/164 [==============================] - 0s 73us/step - loss: 0.6249 - accuracy: 0.6098 - val_loss: 0.6194 - val_accuracy: 0.6364
Epoch 58/500
164/164 [==============================] - 0s 85us/step - loss: 0.6249 - accuracy: 0.6098 - val_loss: 0.6195 - val_accuracy: 0.6545
Epoch 59/500
164/164 [==============================] - 0s 116us/step - loss: 0.6247 - accuracy: 0.6098 - val_loss: 0.6194 - val_accuracy: 0.6545
Epoch 60/500
164/164 [==============================] - 0s 85us/step - loss: 0.6246 - accuracy: 0.6098 - val_loss: 0.6194 - val_accuracy: 0.6545
Epoch 61/500
164/164 [==============================] - 0s 79us/step - loss: 0.6245 - accuracy: 0.6098 - val_loss: 0.6194 - val_accuracy: 0.6545
Epoch 62/500
164/164 [==============================] - 0s 73us/step - loss: 0.6244 - accuracy: 0.6159 - val_loss: 0.6193 - val_accuracy: 0.6545
Epoch 63/500
164/164 [==============================] - 0s 79us/step - loss: 0.6243 - accuracy: 0.6220 - val_loss: 0.6192 - val_accuracy: 0.6545
Epoch 64/500
164/164 [==============================] - 0s 73us/step - loss: 0.6242 - accuracy: 0.6220 - val_loss: 0.6193 - val_accuracy: 0.6545
Epoch 65/500
164/164 [==============================] - 0s 73us/step - loss: 0.6241 - accuracy: 0.6220 - val_loss: 0.6194 - val_accuracy: 0.6545
Epoch 66/500
164/164 [==============================] - 0s 73us/step - loss: 0.6240 - accuracy: 0.6220 - val_loss: 0.6196 - val_accuracy: 0.6545

Epoch 00066: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 67/500
164/164 [==============================] - 0s 67us/step - loss: 0.6239 - accuracy: 0.6220 - val_loss: 0.6196 - val_accuracy: 0.6545
Epoch 68/500
164/164 [==============================] - 0s 67us/step - loss: 0.6238 - accuracy: 0.6220 - val_loss: 0.6197 - val_accuracy: 0.6545
Epoch 69/500
164/164 [==============================] - 0s 79us/step - loss: 0.6238 - accuracy: 0.6280 - val_loss: 0.6197 - val_accuracy: 0.6545
Epoch 70/500
164/164 [==============================] - 0s 67us/step - loss: 0.6238 - accuracy: 0.6280 - val_loss: 0.6197 - val_accuracy: 0.6545
Epoch 71/500
164/164 [==============================] - 0s 79us/step - loss: 0.6237 - accuracy: 0.6280 - val_loss: 0.6198 - val_accuracy: 0.6545
Epoch 72/500
164/164 [==============================] - 0s 73us/step - loss: 0.6237 - accuracy: 0.6280 - val_loss: 0.6198 - val_accuracy: 0.6545
Epoch 73/500
164/164 [==============================] - 0s 73us/step - loss: 0.6236 - accuracy: 0.6280 - val_loss: 0.6198 - val_accuracy: 0.6545
Epoch 74/500
164/164 [==============================] - 0s 98us/step - loss: 0.6235 - accuracy: 0.6280 - val_loss: 0.6198 - val_accuracy: 0.6545
Epoch 75/500
164/164 [==============================] - 0s 73us/step - loss: 0.6235 - accuracy: 0.6280 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 76/500
164/164 [==============================] - 0s 67us/step - loss: 0.6234 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545

Epoch 00076: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05.
Epoch 77/500
164/164 [==============================] - 0s 67us/step - loss: 0.6234 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 78/500
164/164 [==============================] - 0s 67us/step - loss: 0.6234 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 79/500
164/164 [==============================] - 0s 67us/step - loss: 0.6233 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 80/500
164/164 [==============================] - 0s 61us/step - loss: 0.6233 - accuracy: 0.6341 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 81/500
164/164 [==============================] - 0s 61us/step - loss: 0.6233 - accuracy: 0.6341 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 82/500
164/164 [==============================] - 0s 67us/step - loss: 0.6233 - accuracy: 0.6341 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 83/500
164/164 [==============================] - 0s 67us/step - loss: 0.6232 - accuracy: 0.6341 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 84/500
164/164 [==============================] - 0s 67us/step - loss: 0.6232 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 85/500
164/164 [==============================] - 0s 110us/step - loss: 0.6232 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 86/500
164/164 [==============================] - 0s 79us/step - loss: 0.6232 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545

Epoch 00086: ReduceLROnPlateau reducing learning rate to 7.812500371073838e-06.
Epoch 87/500
164/164 [==============================] - 0s 73us/step - loss: 0.6232 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 88/500
164/164 [==============================] - 0s 67us/step - loss: 0.6232 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 89/500
164/164 [==============================] - 0s 67us/step - loss: 0.6231 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 90/500
164/164 [==============================] - 0s 73us/step - loss: 0.6231 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 91/500
164/164 [==============================] - 0s 98us/step - loss: 0.6231 - accuracy: 0.6341 - val_loss: 0.6199 - val_accuracy: 0.6545
Epoch 92/500
164/164 [==============================] - 0s 73us/step - loss: 0.6231 - accuracy: 0.6280 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 93/500
164/164 [==============================] - 0s 73us/step - loss: 0.6231 - accuracy: 0.6280 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 94/500
164/164 [==============================] - 0s 67us/step - loss: 0.6231 - accuracy: 0.6280 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 95/500
164/164 [==============================] - 0s 85us/step - loss: 0.6231 - accuracy: 0.6280 - val_loss: 0.6200 - val_accuracy: 0.6545
Epoch 96/500
164/164 [==============================] - 0s 73us/step - loss: 0.6231 - accuracy: 0.6280 - val_loss: 0.6200 - val_accuracy: 0.6545

Epoch 00096: ReduceLROnPlateau reducing learning rate to 3.906250185536919e-06.
Epoch 97/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6280 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 98/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6280 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 99/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 100/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 101/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 102/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 103/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 104/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 105/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 106/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00106: ReduceLROnPlateau reducing learning rate to 1.9531250927684596e-06.
Epoch 107/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 108/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 109/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 110/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 111/500
164/164 [==============================] - 0s 91us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 112/500
164/164 [==============================] - 0s 85us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 113/500
164/164 [==============================] - 0s 85us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 114/500
164/164 [==============================] - 0s 67us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 115/500
164/164 [==============================] - 0s 85us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 116/500
164/164 [==============================] - 0s 91us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545

Epoch 00116: ReduceLROnPlateau reducing learning rate to 9.765625463842298e-07.
Epoch 117/500
164/164 [==============================] - 0s 91us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 118/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 119/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 120/500
164/164 [==============================] - 0s 73us/step - loss: 0.6230 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 121/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 122/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 123/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 124/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 125/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 126/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00126: ReduceLROnPlateau reducing learning rate to 4.882812731921149e-07.
Epoch 127/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 128/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 129/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 130/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 131/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 132/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 133/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 134/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 135/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 136/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545

Epoch 00136: ReduceLROnPlateau reducing learning rate to 2.4414063659605745e-07.
Epoch 137/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6201 - val_accuracy: 0.6545
Epoch 138/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 139/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 140/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 141/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 142/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 143/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 144/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 145/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 146/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00146: ReduceLROnPlateau reducing learning rate to 1.2207031829802872e-07.
Epoch 147/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 148/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 149/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 150/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 151/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 152/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 153/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 154/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 155/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 156/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00156: ReduceLROnPlateau reducing learning rate to 6.103515914901436e-08.
Epoch 157/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 158/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 159/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 160/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 161/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 162/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 163/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 164/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 165/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 166/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00166: ReduceLROnPlateau reducing learning rate to 3.051757957450718e-08.
Epoch 167/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 168/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 169/500
164/164 [==============================] - 0s 116us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 170/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 171/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 172/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 173/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 174/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 175/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 176/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00176: ReduceLROnPlateau reducing learning rate to 1.525878978725359e-08.
Epoch 177/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 178/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 179/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 180/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 181/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 182/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 183/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 184/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 185/500
164/164 [==============================] - 0s 128us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 186/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00186: ReduceLROnPlateau reducing learning rate to 7.629394893626795e-09.
Epoch 187/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 188/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 189/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 190/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 191/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 192/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 193/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 194/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 195/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 196/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00196: ReduceLROnPlateau reducing learning rate to 3.814697446813398e-09.
Epoch 197/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 198/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 199/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 200/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 201/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 202/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 203/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 204/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 205/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 206/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00206: ReduceLROnPlateau reducing learning rate to 1.907348723406699e-09.
Epoch 207/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 208/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 209/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 210/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 211/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 212/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 213/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 214/500
164/164 [==============================] - 0s 97us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 215/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 216/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00216: ReduceLROnPlateau reducing learning rate to 9.536743617033494e-10.
Epoch 217/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 218/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 219/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 220/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 221/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 222/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 223/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 224/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 225/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 226/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00226: ReduceLROnPlateau reducing learning rate to 4.768371808516747e-10.
Epoch 227/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 228/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 229/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 230/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 231/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 232/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 233/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 234/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 235/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 236/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00236: ReduceLROnPlateau reducing learning rate to 2.3841859042583735e-10.
Epoch 237/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 238/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 239/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 240/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 241/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 242/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 243/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 244/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 245/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 246/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00246: ReduceLROnPlateau reducing learning rate to 1.1920929521291868e-10.
Epoch 247/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 248/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 249/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 250/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 251/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 252/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 253/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 254/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 255/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 256/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00256: ReduceLROnPlateau reducing learning rate to 5.960464760645934e-11.
Epoch 257/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 258/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 259/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 260/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 261/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 262/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 263/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 264/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 265/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 266/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00266: ReduceLROnPlateau reducing learning rate to 2.980232380322967e-11.
Epoch 267/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 268/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 269/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 270/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 271/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 272/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 273/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 274/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 275/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 276/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00276: ReduceLROnPlateau reducing learning rate to 1.4901161901614834e-11.
Epoch 277/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 278/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 279/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 280/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 281/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 282/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 283/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 284/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 285/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 286/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00286: ReduceLROnPlateau reducing learning rate to 7.450580950807417e-12.
Epoch 287/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 288/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 289/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 290/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 291/500
164/164 [==============================] - ETA: 0s - loss: 0.6214 - accuracy: 0.68 - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 292/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 293/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 294/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 295/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 296/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00296: ReduceLROnPlateau reducing learning rate to 3.725290475403709e-12.
Epoch 297/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 298/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 299/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 300/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 301/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 302/500
164/164 [==============================] - 0s 104us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 303/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 304/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 305/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 306/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00306: ReduceLROnPlateau reducing learning rate to 1.8626452377018543e-12.
Epoch 307/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 308/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 309/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 310/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 311/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 312/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 313/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 314/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 315/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 316/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00316: ReduceLROnPlateau reducing learning rate to 9.313226188509272e-13.
Epoch 317/500
164/164 [==============================] - 0s 110us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 318/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 319/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 320/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 321/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 322/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 323/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 324/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 325/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 326/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00326: ReduceLROnPlateau reducing learning rate to 4.656613094254636e-13.
Epoch 327/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 328/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 329/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 330/500
164/164 [==============================] - 0s 104us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 331/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 332/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 333/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 334/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 335/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 336/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00336: ReduceLROnPlateau reducing learning rate to 2.328306547127318e-13.
Epoch 337/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 338/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 339/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 340/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 341/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 342/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 343/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 344/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 345/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 346/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00346: ReduceLROnPlateau reducing learning rate to 1.164153273563659e-13.
Epoch 347/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 348/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 349/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 350/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 351/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 352/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 353/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 354/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 355/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 356/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00356: ReduceLROnPlateau reducing learning rate to 5.820766367818295e-14.
Epoch 357/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 358/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 359/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 360/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 361/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 362/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 363/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 364/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 365/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 366/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00366: ReduceLROnPlateau reducing learning rate to 2.9103831839091474e-14.
Epoch 367/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 368/500
164/164 [==============================] - 0s 104us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 369/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 370/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 371/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 372/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 373/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 374/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 375/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 376/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00376: ReduceLROnPlateau reducing learning rate to 1.4551915919545737e-14.
Epoch 377/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 378/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 379/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 380/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 381/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 382/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 383/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 384/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 385/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 386/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00386: ReduceLROnPlateau reducing learning rate to 7.275957959772868e-15.
Epoch 387/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 388/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 389/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 390/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 391/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 392/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 393/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 394/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 395/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 396/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00396: ReduceLROnPlateau reducing learning rate to 3.637978979886434e-15.
Epoch 397/500
164/164 [==============================] - 0s 122us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 398/500
164/164 [==============================] - 0s 97us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 399/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 400/500
164/164 [==============================] - 0s 140us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 401/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 402/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 403/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 404/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 405/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 406/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00406: ReduceLROnPlateau reducing learning rate to 1.818989489943217e-15.
Epoch 407/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 408/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 409/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 410/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 411/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 412/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 413/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 414/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 415/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 416/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00416: ReduceLROnPlateau reducing learning rate to 9.094947449716085e-16.
Epoch 417/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 418/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 419/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 420/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 421/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 422/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 423/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 424/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 425/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 426/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00426: ReduceLROnPlateau reducing learning rate to 4.547473724858043e-16.
Epoch 427/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 428/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 429/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 430/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 431/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 432/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 433/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 434/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 435/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 436/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00436: ReduceLROnPlateau reducing learning rate to 2.2737368624290214e-16.
Epoch 437/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 438/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 439/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 440/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 441/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 442/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 443/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 444/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 445/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 446/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00446: ReduceLROnPlateau reducing learning rate to 1.1368684312145107e-16.
Epoch 447/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 448/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 449/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 450/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 451/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 452/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 453/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 454/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 455/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 456/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00456: ReduceLROnPlateau reducing learning rate to 5.684342156072553e-17.
Epoch 457/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 458/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 459/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 460/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 461/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 462/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 463/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 464/500
164/164 [==============================] - 0s 79us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 465/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 466/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00466: ReduceLROnPlateau reducing learning rate to 2.842171078036277e-17.
Epoch 467/500
164/164 [==============================] - 0s 98us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 468/500
164/164 [==============================] - 0s 85us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 469/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 470/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 471/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 472/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 473/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 474/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 475/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 476/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00476: ReduceLROnPlateau reducing learning rate to 1.4210855390181384e-17.
Epoch 477/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 478/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 479/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 480/500
164/164 [==============================] - 0s 91us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 481/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 482/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 483/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 484/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 485/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 486/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00486: ReduceLROnPlateau reducing learning rate to 7.105427695090692e-18.
Epoch 487/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 488/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 489/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 490/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 491/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 492/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 493/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 494/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 495/500
164/164 [==============================] - 0s 67us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 496/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545

Epoch 00496: ReduceLROnPlateau reducing learning rate to 3.552713847545346e-18.
Epoch 497/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 498/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 499/500
164/164 [==============================] - 0s 73us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
Epoch 500/500
164/164 [==============================] - 0s 61us/step - loss: 0.6229 - accuracy: 0.6341 - val_loss: 0.6202 - val_accuracy: 0.6545
In [473]:
# Plot the Keras training history: accuracy and loss, train vs. validation.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 500)
In [474]:
# Evaluate the trained network on the held-out test set.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
55/55 [==============================] - 0s 55us/step
test loss: 0.6201585899699819, test accuracy: 0.6545454263687134
In [475]:
# Score the predicted probabilities with ROC AUC (threshold-free metric).
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.7365591397849462
In [476]:
# Binarize the predicted probabilities at the 0.5 threshold, then compute
# Cohen's kappa (chance-corrected agreement) against the true labels.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  0.2943956785955435

KMeans

In [156]:
# Preview the standardized chromagram feature matrix (219 rows x 12 columns).
X
Out[156]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12
0 1.752761 -1.114598 -0.348132 2.966304 -0.925235 0.552064 -0.710296 0.412009 -0.784684 -1.667162 -0.834151 -1.566379
1 -1.387006 -0.333039 0.041297 -0.917052 0.656635 -1.022407 -0.805166 -0.905135 -0.708805 1.299875 -0.948816 -1.413196
2 -0.628834 2.234144 0.613536 -0.978644 0.836157 -0.735689 -0.059767 -1.571350 1.278264 -1.103616 -1.153426 0.685062
3 0.081693 1.765530 -0.365668 0.759057 -1.136519 -0.071939 -0.412587 -1.310708 1.465231 -1.266573 0.040076 -0.308065
4 0.056206 1.646501 0.508800 0.525847 0.506842 -0.390517 -0.241209 -0.409725 1.465231 -0.148455 -0.354779 0.188297
5 -0.475284 0.673200 -1.077774 0.360339 -0.032017 0.910768 1.405699 -0.748908 1.465231 -0.933838 -0.225835 -0.922170
6 0.163433 -0.011304 -1.057752 1.128932 0.026597 1.324397 -0.060004 -0.869144 1.465231 -0.902156 0.598522 -1.044263
7 0.110500 0.178211 -1.394026 0.897710 -1.243991 -0.384906 -0.976009 -1.391712 1.465231 -1.735560 -0.662454 -1.567335
8 0.595793 -0.898440 1.924099 0.051119 0.528166 0.377574 -0.881206 1.992593 -0.169332 0.289379 -1.264549 -0.986278
9 -0.017542 -1.386211 0.574605 -1.268664 -0.911663 -1.241512 -1.036865 1.992592 -1.595580 0.436331 -1.528923 -1.224807
10 0.796579 -1.216664 -0.112327 -0.690935 1.077368 0.874900 -0.528379 1.992593 -0.347105 1.295264 0.069115 -0.797501
11 1.761253 -0.949116 -0.297777 -0.913826 -0.875567 -0.968315 -0.407542 -0.016419 -0.556271 0.391283 0.743737 0.460341
12 1.055147 -0.394702 1.258031 -0.517345 -0.021328 -0.557321 0.669125 1.243976 0.262937 0.337908 0.485227 2.007205
13 1.761253 -0.435392 0.592085 -0.692391 0.535758 -0.708164 -0.382176 0.125232 -0.083947 0.888176 0.994199 1.087612
14 -0.137413 -0.980041 -1.297302 -0.880795 1.395884 -0.901503 -0.756382 -0.304071 -1.277311 1.299875 -1.162641 -1.274290
15 0.017749 -0.936126 -1.136240 -0.862914 0.782285 -0.230220 -0.734923 -0.295573 -1.221528 1.299875 -1.058979 -1.237947
16 -0.382429 -1.386211 -1.437693 -1.268664 0.481905 -1.219324 -1.036865 0.284908 -1.749408 1.299875 -1.532659 -1.596787
17 -1.227998 1.294887 2.200026 -0.318659 2.364886 -1.078770 -0.029818 -1.427905 -1.667310 0.632080 -1.532496 0.393871
18 -1.076569 -0.776099 -0.576736 -0.929915 0.188222 -1.067980 0.156056 -0.671783 -0.806376 1.299875 -0.994450 -0.887100
19 -1.180746 0.385053 2.200026 -0.005979 1.302093 -0.928084 -0.007355 -0.793418 -1.747571 -0.917240 -1.528514 -0.042363
20 1.192498 -0.224351 -0.206238 0.217971 0.304072 0.949289 -0.514273 1.091683 1.465231 -0.718685 -0.298094 -0.264379
21 1.213472 0.048184 -0.600330 0.007879 0.177356 1.215438 -0.394095 0.833193 1.465231 -0.381808 -0.415137 -0.479222
22 -0.004572 -0.052026 2.200026 -0.008377 1.967963 -0.419263 -0.302302 0.046448 -0.343181 1.114245 -0.245566 0.544195
23 -0.229893 0.630662 0.484895 0.048344 -0.498767 0.662733 -0.509073 0.013777 1.465231 0.438954 -0.681106 -0.745425
24 0.217943 -0.177066 0.659322 -0.134414 1.650468 -0.583176 -0.559211 -0.406584 -0.598204 1.299875 -0.406852 0.234437
25 -0.333597 0.972324 -0.721724 -0.554449 -0.493410 0.518707 0.321268 -1.070955 -0.564510 -0.833869 1.695138 -0.783758
26 -0.602430 -0.028610 -0.826715 -0.377974 -0.696055 -0.182813 -0.120202 -0.886178 -0.584506 -0.673011 1.695138 -0.599349
27 -0.251812 -0.171571 -0.358821 -0.244636 -0.013843 0.945000 1.038022 -0.727984 0.773167 -0.048067 1.695138 0.563056
28 -0.368678 2.234146 -0.330756 -0.138877 -0.559553 -0.851160 -0.744847 -0.884446 -0.828451 -1.011890 -0.568588 0.130393
29 -0.578621 2.234146 -0.562376 -0.105376 -0.523903 -0.825786 -0.690314 -0.809436 -0.870132 -1.142574 -0.536734 0.859716
... ... ... ... ... ... ... ... ... ... ... ... ...
189 -0.186741 -1.165084 -0.921181 1.397816 -0.045251 1.304725 -0.153535 1.038843 1.465231 0.175798 1.209572 -0.558994
190 -0.163966 0.884601 1.657605 -0.260803 1.379747 -0.732537 -0.007962 1.276498 -0.419568 1.299875 -0.588765 1.807669
191 -0.254097 -0.617571 0.535718 -0.297186 1.865594 -0.310035 0.916200 1.349280 -0.168599 1.299875 -0.479982 1.556484
192 -1.359431 0.228998 0.259367 -1.169606 0.516753 -1.080823 1.794240 -0.257346 -1.633246 0.226711 -1.071882 2.007205
193 1.622617 2.234146 -1.127605 -0.273622 -1.141311 0.831169 -0.681956 -1.023930 0.097381 -1.505036 -0.432936 -0.703132
194 -0.467818 -0.314482 0.014154 0.025208 -0.807816 -0.093075 3.055878 0.013994 -0.514838 -0.698969 0.430445 -0.302544
195 -1.423661 -0.599038 -0.707969 -1.084898 -1.206212 -0.474886 1.273960 -1.605019 -1.163780 -1.777082 1.695138 -1.000177
196 1.761253 -0.325612 0.357954 -0.709817 1.043699 -0.927401 -0.653399 -0.107407 -0.553233 -0.078311 -0.687092 -0.080809
197 1.761253 1.028574 1.374947 0.116683 1.136600 -0.187955 -0.195137 0.341797 -0.856237 0.366485 -0.658035 0.645642
198 1.761253 0.050043 0.687757 -0.434894 0.705291 -0.680095 -0.549325 0.816661 -0.567882 0.584770 -0.302600 -0.144573
199 -0.919189 -0.200271 -1.032880 -0.020720 -1.069924 -0.649964 -0.843737 -0.600356 1.465231 -0.271885 0.807204 -0.901281
200 -0.988568 -0.189392 -1.028379 0.102199 -1.085401 -0.667213 -0.580786 -0.498427 1.465231 -0.287173 0.737684 -0.938325
201 -0.779270 0.427427 -0.948100 -0.165441 -1.033638 0.272258 -0.022274 -0.198353 1.465231 -0.072695 1.561935 -0.848980
202 1.116127 -1.139042 0.168460 -1.122851 -0.854043 1.067951 -0.735589 0.656822 0.343006 1.299875 0.776152 -0.247153
203 0.762440 -1.190866 -0.050517 -1.116856 -1.095215 0.632777 -0.797158 1.167398 1.149430 1.299875 1.392402 0.047424
204 0.058411 -1.128496 -1.328850 -1.229911 -1.074617 -0.085788 -0.812628 0.326031 0.625580 1.299875 1.290873 -0.077297
205 -0.720270 -0.697672 -0.720394 1.813110 -0.859595 -0.861783 -0.908744 1.216314 1.411592 -0.405169 1.695138 -0.439681
206 -0.364461 -0.010160 -1.095500 0.914425 -1.040229 0.885108 -0.998962 0.772555 1.465231 -0.838613 0.940082 -0.955178
207 -0.272236 -0.196860 -0.727982 0.141418 -0.904008 -0.065613 -0.944991 0.795648 0.611803 0.903563 1.695137 -0.158991
208 1.761253 -0.082523 -0.122308 0.265346 -0.485445 0.500219 0.126422 -0.354469 0.139245 -0.560750 1.149004 0.354466
209 1.573516 -0.161473 -0.359909 1.241760 -0.664508 1.002463 -0.076931 0.519366 1.465231 -0.646843 0.532185 -0.215564
210 1.761253 0.417875 -0.918851 -0.929813 -0.982357 -0.771036 -0.949451 -0.679224 -0.652751 -1.604058 -0.786724 -0.848956
211 1.761253 0.145550 1.710575 1.914520 1.233461 2.474950 0.953725 1.263177 0.830563 0.011243 0.602765 -0.024981
212 0.470346 -0.334996 2.200026 1.858653 0.847856 1.364055 0.061293 1.366431 -0.301856 -0.501569 -0.235203 -0.695720
213 -0.576457 -0.914445 1.070087 0.337357 0.306857 0.394672 -0.372356 -0.450091 -0.240434 0.165141 1.695138 0.345150
214 1.761253 -0.591066 -0.690824 2.065965 -0.721336 -0.340791 -0.483151 0.855908 0.529996 -1.116013 0.527710 -0.050391
215 1.490806 -1.368871 -1.151960 2.846487 -0.924825 0.052478 -0.970103 1.992593 0.913611 -0.452243 0.613814 -1.529552
216 0.191801 -1.348512 -1.315236 -0.455163 -1.244101 -1.240530 -1.036865 -0.531344 1.465231 -1.809822 -1.350349 -1.513618
217 -0.002098 2.039653 -0.752917 0.971355 -0.795869 0.431147 -0.753214 0.043687 1.465231 -1.105865 -0.938582 -0.984328
218 -0.098688 -0.923087 -0.917548 0.312310 -0.183969 0.248120 -0.545773 0.584070 0.733937 -0.697562 1.695138 -0.497086

219 rows × 12 columns

In [157]:
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[157]:
[2628.0,
 2183.3505259693475,
 1946.7709026205612,
 1764.2381744586387,
 1667.52081260375,
 1588.695626080069,
 1521.6546082793252,
 1465.3583612489235,
 1381.9950671234758,
 1338.0035802619554,
 1275.054416118868,
 1244.4334212962274,
 1201.2393010186188,
 1166.7934845697623]
In [158]:
# Elbow curve: look for the "knee" where adding clusters stops paying off.
plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs)
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.title('Elbow method for choosing k')
Out[158]:
[<matplotlib.lines.Line2D at 0x1ef5e4c4748>]

K=2

In [159]:
# Fit K-Means with k = 2, the value suggested by the elbow curve above.
# (fit() is the cell's last expression so the fitted estimator is displayed.)
kmeans_ch = KMeans(n_clusters=2, random_state=0, n_init=10)
kmeans_ch.fit(X)
Out[159]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [160]:
# Cluster assignment (0 or 1) for each song, in row order.
kmeans_ch.labels_
Out[160]:
array([0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
       1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
       0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0,
       1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
       1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0,
       0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
       0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1,
       1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
In [161]:
# predict() on the training data returns the same assignments as labels_
# (Out[160] and Out[161] match); kept as a named variable for the join below.
clusters_ch = kmeans_ch.predict(X)
clusters_ch
Out[161]:
array([0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
       1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
       0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0,
       1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
       1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1,
       1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0,
       0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
       0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1,
       1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
In [162]:
# Append the cluster assignment and the true 'chosen' label to the features.
# list(y) discards y's index so the values align positionally with X's rows.
# NOTE(review): this mutates X in place — X is no longer a pure feature
# matrix for any later cell that reuses it; confirm downstream cells rebuild X.
X.loc[:,'Cluster'] = clusters_ch
X.loc[:,'chosen'] = list(y)
In [163]:
# Inspect the features together with the appended Cluster / chosen columns.
X
Out[163]:
chromagramfiles_1 chromagramfiles_2 chromagramfiles_3 chromagramfiles_4 chromagramfiles_5 chromagramfiles_6 chromagramfiles_7 chromagramfiles_8 chromagramfiles_9 chromagramfiles_10 chromagramfiles_11 chromagramfiles_12 Cluster chosen
0 1.752761 -1.114598 -0.348132 2.966304 -0.925235 0.552064 -0.710296 0.412009 -0.784684 -1.667162 -0.834151 -1.566379 0 0
1 -1.387006 -0.333039 0.041297 -0.917052 0.656635 -1.022407 -0.805166 -0.905135 -0.708805 1.299875 -0.948816 -1.413196 1 0
2 -0.628834 2.234144 0.613536 -0.978644 0.836157 -0.735689 -0.059767 -1.571350 1.278264 -1.103616 -1.153426 0.685062 1 0
3 0.081693 1.765530 -0.365668 0.759057 -1.136519 -0.071939 -0.412587 -1.310708 1.465231 -1.266573 0.040076 -0.308065 0 0
4 0.056206 1.646501 0.508800 0.525847 0.506842 -0.390517 -0.241209 -0.409725 1.465231 -0.148455 -0.354779 0.188297 0 0
5 -0.475284 0.673200 -1.077774 0.360339 -0.032017 0.910768 1.405699 -0.748908 1.465231 -0.933838 -0.225835 -0.922170 0 0
6 0.163433 -0.011304 -1.057752 1.128932 0.026597 1.324397 -0.060004 -0.869144 1.465231 -0.902156 0.598522 -1.044263 0 0
7 0.110500 0.178211 -1.394026 0.897710 -1.243991 -0.384906 -0.976009 -1.391712 1.465231 -1.735560 -0.662454 -1.567335 0 0
8 0.595793 -0.898440 1.924099 0.051119 0.528166 0.377574 -0.881206 1.992593 -0.169332 0.289379 -1.264549 -0.986278 1 0
9 -0.017542 -1.386211 0.574605 -1.268664 -0.911663 -1.241512 -1.036865 1.992592 -1.595580 0.436331 -1.528923 -1.224807 1 0
10 0.796579 -1.216664 -0.112327 -0.690935 1.077368 0.874900 -0.528379 1.992593 -0.347105 1.295264 0.069115 -0.797501 1 0
11 1.761253 -0.949116 -0.297777 -0.913826 -0.875567 -0.968315 -0.407542 -0.016419 -0.556271 0.391283 0.743737 0.460341 1 0
12 1.055147 -0.394702 1.258031 -0.517345 -0.021328 -0.557321 0.669125 1.243976 0.262937 0.337908 0.485227 2.007205 1 0
13 1.761253 -0.435392 0.592085 -0.692391 0.535758 -0.708164 -0.382176 0.125232 -0.083947 0.888176 0.994199 1.087612 1 0
14 -0.137413 -0.980041 -1.297302 -0.880795 1.395884 -0.901503 -0.756382 -0.304071 -1.277311 1.299875 -1.162641 -1.274290 1 0
15 0.017749 -0.936126 -1.136240 -0.862914 0.782285 -0.230220 -0.734923 -0.295573 -1.221528 1.299875 -1.058979 -1.237947 1 0
16 -0.382429 -1.386211 -1.437693 -1.268664 0.481905 -1.219324 -1.036865 0.284908 -1.749408 1.299875 -1.532659 -1.596787 1 0
17 -1.227998 1.294887 2.200026 -0.318659 2.364886 -1.078770 -0.029818 -1.427905 -1.667310 0.632080 -1.532496 0.393871 1 0
18 -1.076569 -0.776099 -0.576736 -0.929915 0.188222 -1.067980 0.156056 -0.671783 -0.806376 1.299875 -0.994450 -0.887100 1 0
19 -1.180746 0.385053 2.200026 -0.005979 1.302093 -0.928084 -0.007355 -0.793418 -1.747571 -0.917240 -1.528514 -0.042363 1 0
20 1.192498 -0.224351 -0.206238 0.217971 0.304072 0.949289 -0.514273 1.091683 1.465231 -0.718685 -0.298094 -0.264379 0 0
21 1.213472 0.048184 -0.600330 0.007879 0.177356 1.215438 -0.394095 0.833193 1.465231 -0.381808 -0.415137 -0.479222 0 0
22 -0.004572 -0.052026 2.200026 -0.008377 1.967963 -0.419263 -0.302302 0.046448 -0.343181 1.114245 -0.245566 0.544195 1 0
23 -0.229893 0.630662 0.484895 0.048344 -0.498767 0.662733 -0.509073 0.013777 1.465231 0.438954 -0.681106 -0.745425 0 0
24 0.217943 -0.177066 0.659322 -0.134414 1.650468 -0.583176 -0.559211 -0.406584 -0.598204 1.299875 -0.406852 0.234437 1 0
25 -0.333597 0.972324 -0.721724 -0.554449 -0.493410 0.518707 0.321268 -1.070955 -0.564510 -0.833869 1.695138 -0.783758 0 0
26 -0.602430 -0.028610 -0.826715 -0.377974 -0.696055 -0.182813 -0.120202 -0.886178 -0.584506 -0.673011 1.695138 -0.599349 0 0
27 -0.251812 -0.171571 -0.358821 -0.244636 -0.013843 0.945000 1.038022 -0.727984 0.773167 -0.048067 1.695138 0.563056 0 0
28 -0.368678 2.234146 -0.330756 -0.138877 -0.559553 -0.851160 -0.744847 -0.884446 -0.828451 -1.011890 -0.568588 0.130393 0 0
29 -0.578621 2.234146 -0.562376 -0.105376 -0.523903 -0.825786 -0.690314 -0.809436 -0.870132 -1.142574 -0.536734 0.859716 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
189 -0.186741 -1.165084 -0.921181 1.397816 -0.045251 1.304725 -0.153535 1.038843 1.465231 0.175798 1.209572 -0.558994 0 1
190 -0.163966 0.884601 1.657605 -0.260803 1.379747 -0.732537 -0.007962 1.276498 -0.419568 1.299875 -0.588765 1.807669 1 1
191 -0.254097 -0.617571 0.535718 -0.297186 1.865594 -0.310035 0.916200 1.349280 -0.168599 1.299875 -0.479982 1.556484 1 1
192 -1.359431 0.228998 0.259367 -1.169606 0.516753 -1.080823 1.794240 -0.257346 -1.633246 0.226711 -1.071882 2.007205 1 1
193 1.622617 2.234146 -1.127605 -0.273622 -1.141311 0.831169 -0.681956 -1.023930 0.097381 -1.505036 -0.432936 -0.703132 0 1
194 -0.467818 -0.314482 0.014154 0.025208 -0.807816 -0.093075 3.055878 0.013994 -0.514838 -0.698969 0.430445 -0.302544 0 1
195 -1.423661 -0.599038 -0.707969 -1.084898 -1.206212 -0.474886 1.273960 -1.605019 -1.163780 -1.777082 1.695138 -1.000177 0 1
196 1.761253 -0.325612 0.357954 -0.709817 1.043699 -0.927401 -0.653399 -0.107407 -0.553233 -0.078311 -0.687092 -0.080809 1 1
197 1.761253 1.028574 1.374947 0.116683 1.136600 -0.187955 -0.195137 0.341797 -0.856237 0.366485 -0.658035 0.645642 1 1
198 1.761253 0.050043 0.687757 -0.434894 0.705291 -0.680095 -0.549325 0.816661 -0.567882 0.584770 -0.302600 -0.144573 1 1
199 -0.919189 -0.200271 -1.032880 -0.020720 -1.069924 -0.649964 -0.843737 -0.600356 1.465231 -0.271885 0.807204 -0.901281 0 1
200 -0.988568 -0.189392 -1.028379 0.102199 -1.085401 -0.667213 -0.580786 -0.498427 1.465231 -0.287173 0.737684 -0.938325 0 1
201 -0.779270 0.427427 -0.948100 -0.165441 -1.033638 0.272258 -0.022274 -0.198353 1.465231 -0.072695 1.561935 -0.848980 0 1
202 1.116127 -1.139042 0.168460 -1.122851 -0.854043 1.067951 -0.735589 0.656822 0.343006 1.299875 0.776152 -0.247153 0 1
203 0.762440 -1.190866 -0.050517 -1.116856 -1.095215 0.632777 -0.797158 1.167398 1.149430 1.299875 1.392402 0.047424 0 1
204 0.058411 -1.128496 -1.328850 -1.229911 -1.074617 -0.085788 -0.812628 0.326031 0.625580 1.299875 1.290873 -0.077297 0 1
205 -0.720270 -0.697672 -0.720394 1.813110 -0.859595 -0.861783 -0.908744 1.216314 1.411592 -0.405169 1.695138 -0.439681 0 1
206 -0.364461 -0.010160 -1.095500 0.914425 -1.040229 0.885108 -0.998962 0.772555 1.465231 -0.838613 0.940082 -0.955178 0 1
207 -0.272236 -0.196860 -0.727982 0.141418 -0.904008 -0.065613 -0.944991 0.795648 0.611803 0.903563 1.695137 -0.158991 0 1
208 1.761253 -0.082523 -0.122308 0.265346 -0.485445 0.500219 0.126422 -0.354469 0.139245 -0.560750 1.149004 0.354466 0 1
209 1.573516 -0.161473 -0.359909 1.241760 -0.664508 1.002463 -0.076931 0.519366 1.465231 -0.646843 0.532185 -0.215564 0 1
210 1.761253 0.417875 -0.918851 -0.929813 -0.982357 -0.771036 -0.949451 -0.679224 -0.652751 -1.604058 -0.786724 -0.848956 0 1
211 1.761253 0.145550 1.710575 1.914520 1.233461 2.474950 0.953725 1.263177 0.830563 0.011243 0.602765 -0.024981 0 1
212 0.470346 -0.334996 2.200026 1.858653 0.847856 1.364055 0.061293 1.366431 -0.301856 -0.501569 -0.235203 -0.695720 0 1
213 -0.576457 -0.914445 1.070087 0.337357 0.306857 0.394672 -0.372356 -0.450091 -0.240434 0.165141 1.695138 0.345150 0 1
214 1.761253 -0.591066 -0.690824 2.065965 -0.721336 -0.340791 -0.483151 0.855908 0.529996 -1.116013 0.527710 -0.050391 0 1
215 1.490806 -1.368871 -1.151960 2.846487 -0.924825 0.052478 -0.970103 1.992593 0.913611 -0.452243 0.613814 -1.529552 0 1
216 0.191801 -1.348512 -1.315236 -0.455163 -1.244101 -1.240530 -1.036865 -0.531344 1.465231 -1.809822 -1.350349 -1.513618 0 1
217 -0.002098 2.039653 -0.752917 0.971355 -0.795869 0.431147 -0.753214 0.043687 1.465231 -1.105865 -0.938582 -0.984328 0 1
218 -0.098688 -0.923087 -0.917548 0.312310 -0.183969 0.248120 -0.545773 0.584070 0.733937 -0.697562 1.695138 -0.497086 0 1

219 rows × 14 columns

In [164]:
# Cross-tabulate cluster membership against the 'chosen' label and show it
# as a stacked bar chart (one bar per cluster, split by chosen = 0 / 1).
counts = X.groupby(['chosen', 'Cluster']).size().reset_index()
per_cluster = counts.pivot(index='Cluster', columns='chosen', values=0)
per_cluster.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[164]:
<matplotlib.axes._subplots.AxesSubplot at 0x1ef623e6a20>
In [165]:
from IPython.display import display, Markdown, Latex

# Render the current company's name as a level-2 markdown heading.
heading = '## ' + companies[5]
display(Markdown(heading))

Urban Place

ANN

In [477]:
# Standardized chromagram features for company index 5 (Urban Place).
X = df_n_ps_std_ch[5]
In [478]:
# Binary target column 'chosen' (0/1) for the same company's songs.
y = df_n_ps[5]['chosen']
In [479]:
# Fixed random_state so the train/test split — and everything trained on it —
# is reproducible under Restart Kernel & Run All.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
In [480]:
# Sanity check on the split: 168 training samples x 12 features.
X_train.shape
Out[480]:
(168, 12)
In [170]:
# Base estimator for the grid search; every hyperparameter set here is
# overridden by the parameter grid below.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
In [171]:
# Candidate values for each MLP hyperparameter explored by the grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10), 
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]  # defined but excluded from the grid below
In [172]:
import time
start = time.time() # current time in seconds since the Unix epoch (1970-01-01); reference point for timing the search

np.random.seed(1234)  # seed so the stochastic MLP training is reproducible
# Parameter grid; batch_size is deliberately left out to keep the search tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Track both Cohen's kappa and accuracy across folds; refit the best model by accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
In [173]:
# Run the exhaustive grid search (the logged run took ~26 minutes) and report
# the best configuration with its cross-validated accuracy and kappa.
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # time after the model search finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.004, 'max_iter': 100}, que permiten obtener un Accuracy de 77.98% y un Kappa del 46.34
Tiempo total: 26.27 minutos
In [481]:
# Build the Keras layer-size list from the grid-search result:
# n0 input features -> hidden layer(s) -> single output unit.
n0 = X_train.shape[1]
# Architecture found by the grid search: one hidden layer of 20 units.
grid.best_params_['hidden_layer_sizes'] = [20]
### hidden_layer_sizes
ns = list(grid.best_params_['hidden_layer_sizes'])  # copy — don't alias the grid results

ns.append(1)  # output layer (binary classification)
lr = 0.004    # learning_rate_init chosen by the grid search
epochs = 100  # max_iter chosen by the grid search
In [482]:
# Keras input layer sized to the n0 (=12) chromagram features.
input_tensor = Input(shape = (n0,))
In [483]:
# Chain Dense layers: tanh hidden layers with widths ns[:-1],
# then a sigmoid output layer of ns[-1] (= 1) unit.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation = 'tanh')(hidden_outputs[-1]))

classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
In [484]:
# Assemble the functional model and snapshot its freshly-initialized weights
# (they are restored via set_weights() before training, in a later cell).
model = Model([input_tensor], [classification_output])
weights = model.get_weights()
In [485]:
# Architecture overview: 12 -> 20 -> 1, 281 trainable parameters.
model.summary()
Model: "model_26"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_26 (InputLayer)        (None, 12)                0         
_________________________________________________________________
dense_82 (Dense)             (None, 20)                260       
_________________________________________________________________
dense_83 (Dense)             (None, 1)                 21        
=================================================================
Total params: 281
Trainable params: 281
Non-trainable params: 0
_________________________________________________________________
In [486]:
# Restore the saved initial weights, then train with Adam and binary
# cross-entropy (matching the sigmoid output).
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# ReduceLROnPlateau: halve the learning rate whenever validation accuracy
# fails to improve by at least 0.01 for 10 consecutive epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), 
            callbacks=[
                keras.callbacks.ReduceLROnPlateau(
                    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                )
            ]
         )
Train on 168 samples, validate on 57 samples
Epoch 1/100
168/168 [==============================] - 0s 1ms/step - loss: 0.8058 - accuracy: 0.5119 - val_loss: 0.6882 - val_accuracy: 0.6140
Epoch 2/100
168/168 [==============================] - 0s 101us/step - loss: 0.7272 - accuracy: 0.5298 - val_loss: 0.6497 - val_accuracy: 0.6316
Epoch 3/100
168/168 [==============================] - 0s 89us/step - loss: 0.6829 - accuracy: 0.5536 - val_loss: 0.6268 - val_accuracy: 0.6316
Epoch 4/100
168/168 [==============================] - 0s 101us/step - loss: 0.6512 - accuracy: 0.6012 - val_loss: 0.5988 - val_accuracy: 0.6667
Epoch 5/100
168/168 [==============================] - 0s 83us/step - loss: 0.6359 - accuracy: 0.6369 - val_loss: 0.5762 - val_accuracy: 0.6491
Epoch 6/100
168/168 [==============================] - 0s 71us/step - loss: 0.6232 - accuracy: 0.6488 - val_loss: 0.5670 - val_accuracy: 0.6667
Epoch 7/100
168/168 [==============================] - 0s 77us/step - loss: 0.6153 - accuracy: 0.6250 - val_loss: 0.5571 - val_accuracy: 0.6667
Epoch 8/100
168/168 [==============================] - 0s 83us/step - loss: 0.6049 - accuracy: 0.6488 - val_loss: 0.5535 - val_accuracy: 0.6491
Epoch 9/100
168/168 [==============================] - 0s 89us/step - loss: 0.5965 - accuracy: 0.6429 - val_loss: 0.5446 - val_accuracy: 0.6842
Epoch 10/100
168/168 [==============================] - 0s 89us/step - loss: 0.5915 - accuracy: 0.6548 - val_loss: 0.5354 - val_accuracy: 0.6667
Epoch 11/100
168/168 [==============================] - 0s 77us/step - loss: 0.5859 - accuracy: 0.6667 - val_loss: 0.5202 - val_accuracy: 0.7193
Epoch 12/100
168/168 [==============================] - 0s 77us/step - loss: 0.5819 - accuracy: 0.6845 - val_loss: 0.5139 - val_accuracy: 0.7193
Epoch 13/100
168/168 [==============================] - 0s 83us/step - loss: 0.5778 - accuracy: 0.6845 - val_loss: 0.5150 - val_accuracy: 0.7193
Epoch 14/100
168/168 [==============================] - 0s 119us/step - loss: 0.5736 - accuracy: 0.6964 - val_loss: 0.5169 - val_accuracy: 0.7018
Epoch 15/100
168/168 [==============================] - 0s 83us/step - loss: 0.5707 - accuracy: 0.7024 - val_loss: 0.5140 - val_accuracy: 0.7193
Epoch 16/100
168/168 [==============================] - 0s 83us/step - loss: 0.5671 - accuracy: 0.7024 - val_loss: 0.5084 - val_accuracy: 0.7193
Epoch 17/100
168/168 [==============================] - 0s 77us/step - loss: 0.5643 - accuracy: 0.7083 - val_loss: 0.5028 - val_accuracy: 0.7368
Epoch 18/100
168/168 [==============================] - 0s 77us/step - loss: 0.5610 - accuracy: 0.7083 - val_loss: 0.4946 - val_accuracy: 0.7544
Epoch 19/100
168/168 [==============================] - 0s 83us/step - loss: 0.5584 - accuracy: 0.7024 - val_loss: 0.4932 - val_accuracy: 0.7544
Epoch 20/100
168/168 [==============================] - 0s 113us/step - loss: 0.5552 - accuracy: 0.7083 - val_loss: 0.4982 - val_accuracy: 0.7368
Epoch 21/100
168/168 [==============================] - 0s 89us/step - loss: 0.5537 - accuracy: 0.7083 - val_loss: 0.5029 - val_accuracy: 0.7368
Epoch 22/100
168/168 [==============================] - 0s 89us/step - loss: 0.5501 - accuracy: 0.7143 - val_loss: 0.4965 - val_accuracy: 0.7544
Epoch 23/100
168/168 [==============================] - 0s 83us/step - loss: 0.5468 - accuracy: 0.7083 - val_loss: 0.4894 - val_accuracy: 0.7544
Epoch 24/100
168/168 [==============================] - 0s 83us/step - loss: 0.5440 - accuracy: 0.7083 - val_loss: 0.4788 - val_accuracy: 0.7544
Epoch 25/100
168/168 [==============================] - 0s 89us/step - loss: 0.5411 - accuracy: 0.7083 - val_loss: 0.4753 - val_accuracy: 0.7544
Epoch 26/100
168/168 [==============================] - 0s 77us/step - loss: 0.5402 - accuracy: 0.7143 - val_loss: 0.4737 - val_accuracy: 0.7544
Epoch 27/100
168/168 [==============================] - 0s 89us/step - loss: 0.5353 - accuracy: 0.7143 - val_loss: 0.4838 - val_accuracy: 0.7368
Epoch 28/100
168/168 [==============================] - 0s 89us/step - loss: 0.5354 - accuracy: 0.7083 - val_loss: 0.5011 - val_accuracy: 0.7193

Epoch 00028: ReduceLROnPlateau reducing learning rate to 0.0020000000949949026.
Epoch 29/100
168/168 [==============================] - 0s 83us/step - loss: 0.5308 - accuracy: 0.7202 - val_loss: 0.5022 - val_accuracy: 0.7193
Epoch 30/100
168/168 [==============================] - 0s 71us/step - loss: 0.5291 - accuracy: 0.7202 - val_loss: 0.4954 - val_accuracy: 0.7368
Epoch 31/100
168/168 [==============================] - 0s 77us/step - loss: 0.5262 - accuracy: 0.7143 - val_loss: 0.4939 - val_accuracy: 0.7368
Epoch 32/100
168/168 [==============================] - 0s 71us/step - loss: 0.5243 - accuracy: 0.7262 - val_loss: 0.4921 - val_accuracy: 0.7368
Epoch 33/100
168/168 [==============================] - 0s 83us/step - loss: 0.5228 - accuracy: 0.7262 - val_loss: 0.4889 - val_accuracy: 0.7368
Epoch 34/100
168/168 [==============================] - 0s 77us/step - loss: 0.5217 - accuracy: 0.7381 - val_loss: 0.4878 - val_accuracy: 0.7368
Epoch 35/100
168/168 [==============================] - 0s 77us/step - loss: 0.5206 - accuracy: 0.7381 - val_loss: 0.4875 - val_accuracy: 0.7544
Epoch 36/100
168/168 [==============================] - 0s 89us/step - loss: 0.5192 - accuracy: 0.7381 - val_loss: 0.4865 - val_accuracy: 0.7544
Epoch 37/100
168/168 [==============================] - 0s 77us/step - loss: 0.5173 - accuracy: 0.7440 - val_loss: 0.4868 - val_accuracy: 0.7544
Epoch 38/100
168/168 [==============================] - 0s 71us/step - loss: 0.5151 - accuracy: 0.7321 - val_loss: 0.4868 - val_accuracy: 0.7544

Epoch 00038: ReduceLROnPlateau reducing learning rate to 0.0010000000474974513.
Epoch 39/100
168/168 [==============================] - 0s 77us/step - loss: 0.5137 - accuracy: 0.7321 - val_loss: 0.4865 - val_accuracy: 0.7368
Epoch 40/100
168/168 [==============================] - 0s 77us/step - loss: 0.5125 - accuracy: 0.7381 - val_loss: 0.4853 - val_accuracy: 0.7544
Epoch 41/100
168/168 [==============================] - 0s 77us/step - loss: 0.5120 - accuracy: 0.7321 - val_loss: 0.4830 - val_accuracy: 0.7544
Epoch 42/100
168/168 [==============================] - 0s 77us/step - loss: 0.5111 - accuracy: 0.7381 - val_loss: 0.4820 - val_accuracy: 0.7544
Epoch 43/100
168/168 [==============================] - 0s 71us/step - loss: 0.5102 - accuracy: 0.7381 - val_loss: 0.4811 - val_accuracy: 0.7544
Epoch 44/100
168/168 [==============================] - 0s 77us/step - loss: 0.5093 - accuracy: 0.7440 - val_loss: 0.4816 - val_accuracy: 0.7544
Epoch 45/100
168/168 [==============================] - 0s 83us/step - loss: 0.5084 - accuracy: 0.7440 - val_loss: 0.4807 - val_accuracy: 0.7544
Epoch 46/100
168/168 [==============================] - 0s 77us/step - loss: 0.5079 - accuracy: 0.7440 - val_loss: 0.4797 - val_accuracy: 0.7544
Epoch 47/100
168/168 [==============================] - 0s 119us/step - loss: 0.5069 - accuracy: 0.7440 - val_loss: 0.4795 - val_accuracy: 0.7544
Epoch 48/100
168/168 [==============================] - 0s 83us/step - loss: 0.5058 - accuracy: 0.7381 - val_loss: 0.4782 - val_accuracy: 0.7544

Epoch 00048: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257.
Epoch 49/100
168/168 [==============================] - 0s 71us/step - loss: 0.5051 - accuracy: 0.7381 - val_loss: 0.4786 - val_accuracy: 0.7544
Epoch 50/100
168/168 [==============================] - 0s 83us/step - loss: 0.5045 - accuracy: 0.7381 - val_loss: 0.4792 - val_accuracy: 0.7368
Epoch 51/100
168/168 [==============================] - 0s 77us/step - loss: 0.5039 - accuracy: 0.7381 - val_loss: 0.4789 - val_accuracy: 0.7368
Epoch 52/100
168/168 [==============================] - 0s 83us/step - loss: 0.5037 - accuracy: 0.7381 - val_loss: 0.4790 - val_accuracy: 0.7368
Epoch 53/100
168/168 [==============================] - 0s 77us/step - loss: 0.5031 - accuracy: 0.7440 - val_loss: 0.4784 - val_accuracy: 0.7368
Epoch 54/100
168/168 [==============================] - 0s 77us/step - loss: 0.5030 - accuracy: 0.7440 - val_loss: 0.4770 - val_accuracy: 0.7368
Epoch 55/100
168/168 [==============================] - 0s 71us/step - loss: 0.5024 - accuracy: 0.7440 - val_loss: 0.4769 - val_accuracy: 0.7368
Epoch 56/100
168/168 [==============================] - 0s 77us/step - loss: 0.5020 - accuracy: 0.7440 - val_loss: 0.4774 - val_accuracy: 0.7368
Epoch 57/100
168/168 [==============================] - 0s 71us/step - loss: 0.5016 - accuracy: 0.7440 - val_loss: 0.4769 - val_accuracy: 0.7368
Epoch 58/100
168/168 [==============================] - 0s 65us/step - loss: 0.5012 - accuracy: 0.7440 - val_loss: 0.4766 - val_accuracy: 0.7368

Epoch 00058: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628.
Epoch 59/100
168/168 [==============================] - 0s 77us/step - loss: 0.5007 - accuracy: 0.7440 - val_loss: 0.4767 - val_accuracy: 0.7368
Epoch 60/100
168/168 [==============================] - 0s 83us/step - loss: 0.5005 - accuracy: 0.7440 - val_loss: 0.4769 - val_accuracy: 0.7368
Epoch 61/100
168/168 [==============================] - 0s 83us/step - loss: 0.5002 - accuracy: 0.7440 - val_loss: 0.4776 - val_accuracy: 0.7368
Epoch 62/100
168/168 [==============================] - 0s 71us/step - loss: 0.5000 - accuracy: 0.7440 - val_loss: 0.4780 - val_accuracy: 0.7368
Epoch 63/100
168/168 [==============================] - 0s 77us/step - loss: 0.4997 - accuracy: 0.7440 - val_loss: 0.4782 - val_accuracy: 0.7193
Epoch 64/100
168/168 [==============================] - 0s 77us/step - loss: 0.4996 - accuracy: 0.7500 - val_loss: 0.4786 - val_accuracy: 0.7193
Epoch 65/100
168/168 [==============================] - 0s 71us/step - loss: 0.4993 - accuracy: 0.7560 - val_loss: 0.4793 - val_accuracy: 0.7193
Epoch 66/100
168/168 [==============================] - 0s 65us/step - loss: 0.4990 - accuracy: 0.7560 - val_loss: 0.4798 - val_accuracy: 0.7193
Epoch 67/100
168/168 [==============================] - 0s 83us/step - loss: 0.4988 - accuracy: 0.7560 - val_loss: 0.4802 - val_accuracy: 0.7193
Epoch 68/100
168/168 [==============================] - 0s 101us/step - loss: 0.4985 - accuracy: 0.7619 - val_loss: 0.4807 - val_accuracy: 0.7193

Epoch 00068: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814.
Epoch 69/100
168/168 [==============================] - 0s 95us/step - loss: 0.4983 - accuracy: 0.7560 - val_loss: 0.4809 - val_accuracy: 0.7193
Epoch 70/100
168/168 [==============================] - 0s 89us/step - loss: 0.4982 - accuracy: 0.7560 - val_loss: 0.4812 - val_accuracy: 0.7193
Epoch 71/100
168/168 [==============================] - 0s 77us/step - loss: 0.4981 - accuracy: 0.7560 - val_loss: 0.4815 - val_accuracy: 0.7193
Epoch 72/100
168/168 [==============================] - 0s 83us/step - loss: 0.4979 - accuracy: 0.7560 - val_loss: 0.4817 - val_accuracy: 0.7193
Epoch 73/100
168/168 [==============================] - 0s 101us/step - loss: 0.4979 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 74/100
168/168 [==============================] - 0s 77us/step - loss: 0.4978 - accuracy: 0.7560 - val_loss: 0.4821 - val_accuracy: 0.7193
Epoch 75/100
168/168 [==============================] - 0s 65us/step - loss: 0.4976 - accuracy: 0.7560 - val_loss: 0.4822 - val_accuracy: 0.7193
Epoch 76/100
168/168 [==============================] - 0s 59us/step - loss: 0.4975 - accuracy: 0.7560 - val_loss: 0.4822 - val_accuracy: 0.7193
Epoch 77/100
168/168 [==============================] - 0s 65us/step - loss: 0.4974 - accuracy: 0.7560 - val_loss: 0.4822 - val_accuracy: 0.7193
Epoch 78/100
168/168 [==============================] - 0s 77us/step - loss: 0.4973 - accuracy: 0.7560 - val_loss: 0.4821 - val_accuracy: 0.7193

Epoch 00078: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05.
Epoch 79/100
168/168 [==============================] - 0s 65us/step - loss: 0.4972 - accuracy: 0.7560 - val_loss: 0.4820 - val_accuracy: 0.7193
Epoch 80/100
168/168 [==============================] - 0s 65us/step - loss: 0.4971 - accuracy: 0.7560 - val_loss: 0.4821 - val_accuracy: 0.7193
Epoch 81/100
168/168 [==============================] - 0s 65us/step - loss: 0.4971 - accuracy: 0.7560 - val_loss: 0.4820 - val_accuracy: 0.7193
Epoch 82/100
168/168 [==============================] - 0s 65us/step - loss: 0.4970 - accuracy: 0.7560 - val_loss: 0.4820 - val_accuracy: 0.7193
Epoch 83/100
168/168 [==============================] - 0s 65us/step - loss: 0.4970 - accuracy: 0.7560 - val_loss: 0.4819 - val_accuracy: 0.7193
Epoch 84/100
168/168 [==============================] - 0s 65us/step - loss: 0.4969 - accuracy: 0.7560 - val_loss: 0.4819 - val_accuracy: 0.7193
Epoch 85/100
168/168 [==============================] - 0s 59us/step - loss: 0.4968 - accuracy: 0.7560 - val_loss: 0.4819 - val_accuracy: 0.7193
Epoch 86/100
168/168 [==============================] - 0s 65us/step - loss: 0.4968 - accuracy: 0.7560 - val_loss: 0.4819 - val_accuracy: 0.7193
Epoch 87/100
168/168 [==============================] - 0s 59us/step - loss: 0.4967 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 88/100
168/168 [==============================] - 0s 59us/step - loss: 0.4967 - accuracy: 0.7560 - val_loss: 0.4819 - val_accuracy: 0.7193

Epoch 00088: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05.
Epoch 89/100
168/168 [==============================] - 0s 77us/step - loss: 0.4966 - accuracy: 0.7560 - val_loss: 0.4819 - val_accuracy: 0.7193
Epoch 90/100
168/168 [==============================] - 0s 65us/step - loss: 0.4966 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 91/100
168/168 [==============================] - 0s 65us/step - loss: 0.4966 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 92/100
168/168 [==============================] - 0s 65us/step - loss: 0.4965 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 93/100
168/168 [==============================] - 0s 59us/step - loss: 0.4965 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 94/100
168/168 [==============================] - 0s 59us/step - loss: 0.4965 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 95/100
168/168 [==============================] - 0s 65us/step - loss: 0.4965 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 96/100
168/168 [==============================] - 0s 71us/step - loss: 0.4964 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 97/100
168/168 [==============================] - 0s 59us/step - loss: 0.4964 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 98/100
168/168 [==============================] - 0s 107us/step - loss: 0.4964 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193

Epoch 00098: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05.
Epoch 99/100
168/168 [==============================] - 0s 77us/step - loss: 0.4963 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
Epoch 100/100
168/168 [==============================] - 0s 65us/step - loss: 0.4963 - accuracy: 0.7560 - val_loss: 0.4818 - val_accuracy: 0.7193
In [487]:
# Plot training vs. validation accuracy and loss from the Keras History object.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# One x-value per epoch (removed leftover debug print of this range).
epochs = range(len(acc))

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 100)
In [488]:
# Evaluate the trained model on the held-out test set.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
57/57 [==============================] - 0s 70us/step
test loss: 0.48182071510114166, test accuracy: 0.719298243522644
In [489]:
# Score the continuous predictions with ROC AUC (threshold-free metric).
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC:  0.773015873015873
In [490]:
# Binarise the predicted probabilities at 0.5, then compute Cohen's kappa
# against the true labels.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa:  0.24378109452736318

KMeans

In [66]:
X
Out[66]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13
0 -0.339415 0.847773 0.497198 -0.389310 1.225458 1.947033 -0.736267 0.492219 0.576682 1.504697 -1.796460 0.724954 0.958600
1 0.587658 -1.195426 0.636375 0.199876 0.765321 0.061181 0.379367 -0.440867 0.232893 1.339920 0.110001 0.807525 0.815678
2 1.465595 -2.307943 0.354567 -0.058273 -1.298853 -0.811453 -1.551580 -3.934320 -1.079432 2.546130 1.421407 0.639359 0.199094
3 0.749403 -1.690498 -0.125200 -1.016135 0.825845 0.271444 -0.104786 -0.992141 0.049182 1.425948 -0.343269 -0.789558 -0.411898
4 -0.280577 0.393332 0.744917 2.411400 -0.777421 -0.420018 1.258355 -1.544565 -0.498071 0.421527 -0.632908 -0.056846 -0.072348
5 -0.158690 0.404891 -0.147920 -0.299241 -0.786974 0.697216 0.290501 0.019739 -1.468086 -0.346174 -0.086965 0.026492 1.019512
6 1.646777 0.772744 -1.425228 -0.562610 -1.556076 0.533289 -0.404271 1.676958 0.979516 0.415548 0.544719 0.433332 0.204271
7 1.124970 0.506236 0.738993 1.984485 -0.928706 -0.494097 -0.707105 -0.494778 -1.642929 0.207467 0.181382 2.431721 0.848697
8 0.920059 1.438862 -2.048354 1.503567 -2.801303 0.567132 -0.745441 0.569519 0.130917 1.965436 -0.034797 1.164878 0.074074
9 0.182544 0.310622 0.067722 0.870138 0.168366 0.682045 -0.191296 -0.144962 -0.630020 -0.284032 -0.315301 0.344841 0.495167
10 0.168663 0.389450 0.034360 1.213392 0.248437 0.870618 -0.460824 -0.174734 -0.710502 -0.228408 -0.265153 0.349416 0.584114
11 0.153010 -0.118336 0.639531 1.504522 0.937909 0.356048 -0.089987 -0.628522 0.064203 0.966049 0.403915 -0.943626 0.173874
12 0.132578 0.261966 -2.871493 -3.398160 -0.256458 1.596532 -0.358711 0.175955 -0.499075 0.949085 2.235525 -0.197712 -0.272366
13 1.094629 0.885150 -1.130672 -0.083270 0.672482 0.750453 -0.863949 0.140540 0.423312 -0.305155 -0.424905 0.318660 0.885900
14 0.771472 0.364448 -0.454696 0.434253 0.912699 0.745924 -0.073390 -0.406473 0.450765 0.323180 -0.458826 -0.132295 0.495454
15 0.677561 0.166795 0.746471 0.075191 0.867924 -1.621678 0.771146 -0.067286 0.557998 -0.093593 0.020233 -0.800013 -0.629188
16 -0.032353 1.227345 -0.188580 0.927210 0.016663 1.001867 -0.473811 0.782387 1.542760 -0.345478 -0.838104 -0.439443 1.179204
17 0.459031 1.258961 -0.329412 1.391790 -0.208888 1.059241 -1.245671 0.619153 0.245780 0.644548 -0.602629 -0.928581 0.739885
18 -0.359172 0.051214 -0.603962 0.778896 1.630471 1.802477 1.486205 -0.140738 -0.894366 0.736624 2.114721 1.078175 -0.965785
19 0.209859 -0.615399 -0.676895 0.735655 0.805509 -0.696793 1.073068 0.240429 -0.205934 -0.759693 0.672843 0.569482 -0.455391
20 0.127381 -0.265099 -0.258801 -0.127568 0.649447 0.244473 1.897421 -0.344616 -0.593159 0.065147 1.787607 1.219355 -0.171813
21 1.222717 0.409860 1.311826 0.703873 0.322062 0.305461 -0.522644 -0.750833 0.001767 0.017953 0.254329 -0.227762 -0.614790
22 1.173352 0.490500 0.742825 -0.028159 -0.272396 -0.502733 -0.759443 -1.031924 -0.157975 0.075659 0.604220 0.143298 -0.001849
23 1.069960 0.858822 -0.795544 0.076688 0.851875 0.735014 -0.758779 0.065595 0.532667 -0.391858 -0.497019 0.240822 0.848126
24 0.581377 -0.804045 0.399887 1.535671 0.245878 0.904192 -0.233991 -0.925983 0.212280 0.499535 -0.024926 -0.925999 1.294925
25 0.161110 0.025075 0.716318 1.532230 0.889883 0.353167 -0.058787 -0.593046 0.093773 0.927085 0.199691 -0.979872 0.232850
26 0.431443 0.442713 0.259120 0.045533 0.102675 0.367606 0.054320 0.942924 0.180609 0.550983 0.265291 0.321252 -0.830969
27 0.344525 -1.140315 -0.725453 -0.547965 0.449924 0.303904 1.053624 1.051712 0.509322 0.181611 -0.519979 -1.134490 -1.439105
28 -0.041565 0.671274 0.195143 0.247294 0.531620 1.050124 0.311358 0.988161 -0.198869 0.387795 1.757366 1.351684 0.194840
29 0.417845 -1.134173 -0.760709 -0.605264 0.077464 0.533333 1.104524 2.124971 0.083548 0.801730 0.092534 -1.281628 -1.468782
... ... ... ... ... ... ... ... ... ... ... ... ... ...
225 1.532114 -1.060006 -0.434145 -0.999435 -1.259462 0.039140 -0.802013 -0.655286 0.714448 1.005958 -0.086372 0.537392 0.054440
226 -0.942320 1.172080 0.506725 -0.230675 -0.104635 0.898742 -1.107001 -1.182148 -0.940991 0.232366 1.778224 0.975251 1.731084
227 1.421974 0.631029 -0.563813 -0.694595 -0.673270 0.929022 0.476907 -1.025173 -0.813644 -0.060006 -0.738730 -0.558099 0.057654
228 -1.473385 -0.806223 1.849423 -1.252541 0.941013 -0.872947 -1.812392 -0.242718 -0.097212 -0.510500 -0.232195 -0.546399 0.945530
229 -1.135926 -0.772372 1.164844 -1.022517 0.630202 -0.496999 -1.101656 -0.168921 -0.295159 -0.587401 0.369033 -0.266325 0.604469
230 -1.085049 0.879566 0.442593 0.128917 0.393498 0.531555 0.392194 1.418515 0.891015 -0.348926 -0.756201 -0.838584 -0.015971
231 -0.352258 0.556982 0.530520 0.443818 0.300921 0.032128 -0.797384 -0.573532 0.398084 0.328875 -0.274964 -1.300920 0.254456
232 -1.190363 0.797356 0.758472 0.587917 0.890540 0.471925 0.105793 0.680721 0.230834 -0.150709 -0.816744 -0.470618 0.371198
233 -0.651003 -0.586618 1.326854 -0.451354 0.507113 0.165474 -0.919675 -0.448249 -1.310940 -1.372737 0.406029 -1.414627 -0.434858
234 -1.459511 -0.516281 1.631699 -1.141842 0.584621 -0.458541 -1.428877 -0.934556 -0.216455 -0.049794 0.095580 0.387068 0.693730
235 -0.726984 0.702447 0.798069 -0.320660 0.530902 1.019988 0.144995 0.207847 0.039592 0.220761 0.762941 0.575034 0.671517
236 -0.300986 -0.404923 0.715406 0.245380 -0.427936 -0.334843 -0.228084 -0.330898 -0.674327 0.199560 0.827455 0.016433 0.866789
237 -0.736244 0.088611 0.910051 0.437100 0.258256 0.363828 -0.415290 -0.717445 -0.012727 0.436925 -0.786954 -1.217376 0.352825
238 0.610473 -2.664315 1.303652 -2.022376 1.500032 -1.280926 -1.249533 0.432111 -0.768558 0.291156 -0.092312 0.053770 -0.401166
239 -2.045424 -2.954642 0.302601 -0.868092 -1.038134 -1.230777 0.514329 0.057591 -1.023895 0.275395 -1.450282 0.386242 0.318763
240 0.329793 -1.367570 -1.454329 -0.207924 -0.723609 -0.149025 -0.085298 -0.011595 -0.240239 -0.009120 -0.325229 -0.025722 0.114182
241 -1.919591 1.382172 -0.134161 0.837967 -0.687780 0.944303 -0.258652 -0.742178 0.386031 -1.178099 -1.843543 -0.710556 -0.318561
242 -2.087669 1.400006 -0.494964 0.451717 -0.759188 0.736625 0.133121 -0.196031 1.121231 0.474128 -0.345937 -0.409324 -0.442069
243 -2.131652 0.439305 -0.612226 0.854126 -0.494550 0.825299 0.301373 -0.018964 0.690556 -0.078762 -0.709495 -0.075857 -0.418656
244 -1.611989 -0.756403 -0.410917 1.075909 0.297336 -1.317576 1.115011 -0.467065 -0.768378 1.615499 1.611125 -1.018782 -1.798744
245 -0.142010 0.000190 -0.063461 -0.506353 -0.386942 -0.256144 0.270621 -1.497417 0.507892 0.456828 -0.431169 -0.978417 0.015849
246 -1.263975 -1.168117 -1.396090 -0.312016 1.862268 1.400290 0.646060 -0.686864 0.418524 -0.069926 -0.653856 -0.853617 -0.106814
247 -0.507700 0.899825 1.510153 1.083642 2.081451 0.589016 0.901321 0.658808 0.152596 0.176442 -0.447633 0.287838 0.650479
248 -0.159768 0.518093 2.197018 0.698491 0.476336 -2.014255 -1.614667 -0.397282 -1.781932 -0.208894 1.650551 -0.771436 -0.987237
249 -1.037899 1.016712 2.774230 0.665468 -0.385673 0.587263 -0.121609 -0.331379 0.622484 -0.387131 -0.276584 0.218207 1.689216
250 -0.526923 -1.169944 0.474875 -0.789231 0.369827 -0.537003 -1.089843 -0.173366 -0.023237 -0.142334 0.740065 0.813114 0.872556
251 -0.770856 -1.024349 -0.019140 -0.097521 0.092703 0.369242 -0.273901 0.190740 -0.074032 0.113055 0.140291 -0.696275 0.166679
252 -0.905458 -0.790575 0.206164 -0.723816 -0.444860 0.107833 -0.734514 -0.533865 -0.634334 0.320526 0.088428 -0.348210 0.347201
253 -1.378235 -0.338405 0.016815 -0.394563 0.034043 1.023865 -0.303960 -1.316121 0.198697 0.670577 0.809574 0.580565 0.056004
254 -0.199959 -2.035812 -0.904507 -1.511975 -0.437843 0.262972 -1.943788 -1.963300 -2.256227 0.354369 -0.039829 0.882325 0.139307

255 rows × 13 columns

In [67]:
# Elbow method: within-cluster sum of squares (inertia_) for k = 1..14.
# KMeans.fit returns the fitted estimator, so the loop collapses to a comprehension.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
Out[67]:
[3315.0,
 2972.7888695817974,
 2748.18187155972,
 2544.9420084212106,
 2413.687059384553,
 2278.037996783226,
 2213.3487507256823,
 2123.4282707474663,
 2067.8299633414163,
 1977.777252698108,
 1956.5229777214513,
 1880.0296166971755,
 1815.5096049846275,
 1785.9955747862728]
In [68]:
# Elbow plot: look for the point where the inertia curve flattens out.
plt.figure(figsize=(12, 12))
k_values = range(1, 15)
plt.plot(k_values, WSSs)
Out[68]:
[<matplotlib.lines.Line2D at 0x1e82ae84f98>]

K=6

In [69]:
# Fit the final clustering with k=6 (chosen from the elbow plot above).
kmeans_ch = KMeans(n_clusters=6, random_state=0, n_init=10)
kmeans_ch.fit(X)
Out[69]:
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
    n_clusters=6, n_init=10, n_jobs=1, precompute_distances='auto',
    random_state=0, tol=0.0001, verbose=0)
In [70]:
kmeans_ch.labels_
Out[70]:
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
       0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
       2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
       3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
       4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
       1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
       4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
       4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
       4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
       4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
       1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
       4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
In [71]:
# Predict cluster membership for X; identical to labels_ here
# since X is the same data the model was fitted on.
clusters_ch = kmeans_ch.predict(X)
clusters_ch
Out[71]:
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
       0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
       2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
       3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
       4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
       1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
       4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
       4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
       4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
       4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
       1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
       4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
In [72]:
# Attach the cluster assignment and the binary target to the feature frame
# so they can be cross-tabulated below.
X['Cluster'] = clusters_ch
X['chosen'] = list(y)
In [73]:
X
Out[73]:
mfccfiles_1 mfccfiles_2 mfccfiles_3 mfccfiles_4 mfccfiles_5 mfccfiles_6 mfccfiles_7 mfccfiles_8 mfccfiles_9 mfccfiles_10 mfccfiles_11 mfccfiles_12 mfccfiles_13 Cluster chosen
0 -0.339415 0.847773 0.497198 -0.389310 1.225458 1.947033 -0.736267 0.492219 0.576682 1.504697 -1.796460 0.724954 0.958600 4 0
1 0.587658 -1.195426 0.636375 0.199876 0.765321 0.061181 0.379367 -0.440867 0.232893 1.339920 0.110001 0.807525 0.815678 2 0
2 1.465595 -2.307943 0.354567 -0.058273 -1.298853 -0.811453 -1.551580 -3.934320 -1.079432 2.546130 1.421407 0.639359 0.199094 2 0
3 0.749403 -1.690498 -0.125200 -1.016135 0.825845 0.271444 -0.104786 -0.992141 0.049182 1.425948 -0.343269 -0.789558 -0.411898 2 0
4 -0.280577 0.393332 0.744917 2.411400 -0.777421 -0.420018 1.258355 -1.544565 -0.498071 0.421527 -0.632908 -0.056846 -0.072348 0 0
5 -0.158690 0.404891 -0.147920 -0.299241 -0.786974 0.697216 0.290501 0.019739 -1.468086 -0.346174 -0.086965 0.026492 1.019512 1 0
6 1.646777 0.772744 -1.425228 -0.562610 -1.556076 0.533289 -0.404271 1.676958 0.979516 0.415548 0.544719 0.433332 0.204271 1 0
7 1.124970 0.506236 0.738993 1.984485 -0.928706 -0.494097 -0.707105 -0.494778 -1.642929 0.207467 0.181382 2.431721 0.848697 0 0
8 0.920059 1.438862 -2.048354 1.503567 -2.801303 0.567132 -0.745441 0.569519 0.130917 1.965436 -0.034797 1.164878 0.074074 1 0
9 0.182544 0.310622 0.067722 0.870138 0.168366 0.682045 -0.191296 -0.144962 -0.630020 -0.284032 -0.315301 0.344841 0.495167 4 0
10 0.168663 0.389450 0.034360 1.213392 0.248437 0.870618 -0.460824 -0.174734 -0.710502 -0.228408 -0.265153 0.349416 0.584114 4 0
11 0.153010 -0.118336 0.639531 1.504522 0.937909 0.356048 -0.089987 -0.628522 0.064203 0.966049 0.403915 -0.943626 0.173874 4 0
12 0.132578 0.261966 -2.871493 -3.398160 -0.256458 1.596532 -0.358711 0.175955 -0.499075 0.949085 2.235525 -0.197712 -0.272366 1 0
13 1.094629 0.885150 -1.130672 -0.083270 0.672482 0.750453 -0.863949 0.140540 0.423312 -0.305155 -0.424905 0.318660 0.885900 4 0
14 0.771472 0.364448 -0.454696 0.434253 0.912699 0.745924 -0.073390 -0.406473 0.450765 0.323180 -0.458826 -0.132295 0.495454 4 0
15 0.677561 0.166795 0.746471 0.075191 0.867924 -1.621678 0.771146 -0.067286 0.557998 -0.093593 0.020233 -0.800013 -0.629188 3 0
16 -0.032353 1.227345 -0.188580 0.927210 0.016663 1.001867 -0.473811 0.782387 1.542760 -0.345478 -0.838104 -0.439443 1.179204 4 0
17 0.459031 1.258961 -0.329412 1.391790 -0.208888 1.059241 -1.245671 0.619153 0.245780 0.644548 -0.602629 -0.928581 0.739885 4 0
18 -0.359172 0.051214 -0.603962 0.778896 1.630471 1.802477 1.486205 -0.140738 -0.894366 0.736624 2.114721 1.078175 -0.965785 4 0
19 0.209859 -0.615399 -0.676895 0.735655 0.805509 -0.696793 1.073068 0.240429 -0.205934 -0.759693 0.672843 0.569482 -0.455391 4 0
20 0.127381 -0.265099 -0.258801 -0.127568 0.649447 0.244473 1.897421 -0.344616 -0.593159 0.065147 1.787607 1.219355 -0.171813 4 0
21 1.222717 0.409860 1.311826 0.703873 0.322062 0.305461 -0.522644 -0.750833 0.001767 0.017953 0.254329 -0.227762 -0.614790 0 0
22 1.173352 0.490500 0.742825 -0.028159 -0.272396 -0.502733 -0.759443 -1.031924 -0.157975 0.075659 0.604220 0.143298 -0.001849 0 0
23 1.069960 0.858822 -0.795544 0.076688 0.851875 0.735014 -0.758779 0.065595 0.532667 -0.391858 -0.497019 0.240822 0.848126 4 0
24 0.581377 -0.804045 0.399887 1.535671 0.245878 0.904192 -0.233991 -0.925983 0.212280 0.499535 -0.024926 -0.925999 1.294925 4 0
25 0.161110 0.025075 0.716318 1.532230 0.889883 0.353167 -0.058787 -0.593046 0.093773 0.927085 0.199691 -0.979872 0.232850 4 0
26 0.431443 0.442713 0.259120 0.045533 0.102675 0.367606 0.054320 0.942924 0.180609 0.550983 0.265291 0.321252 -0.830969 4 0
27 0.344525 -1.140315 -0.725453 -0.547965 0.449924 0.303904 1.053624 1.051712 0.509322 0.181611 -0.519979 -1.134490 -1.439105 3 0
28 -0.041565 0.671274 0.195143 0.247294 0.531620 1.050124 0.311358 0.988161 -0.198869 0.387795 1.757366 1.351684 0.194840 4 0
29 0.417845 -1.134173 -0.760709 -0.605264 0.077464 0.533333 1.104524 2.124971 0.083548 0.801730 0.092534 -1.281628 -1.468782 3 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
225 1.532114 -1.060006 -0.434145 -0.999435 -1.259462 0.039140 -0.802013 -0.655286 0.714448 1.005958 -0.086372 0.537392 0.054440 1 1
226 -0.942320 1.172080 0.506725 -0.230675 -0.104635 0.898742 -1.107001 -1.182148 -0.940991 0.232366 1.778224 0.975251 1.731084 0 1
227 1.421974 0.631029 -0.563813 -0.694595 -0.673270 0.929022 0.476907 -1.025173 -0.813644 -0.060006 -0.738730 -0.558099 0.057654 1 1
228 -1.473385 -0.806223 1.849423 -1.252541 0.941013 -0.872947 -1.812392 -0.242718 -0.097212 -0.510500 -0.232195 -0.546399 0.945530 2 1
229 -1.135926 -0.772372 1.164844 -1.022517 0.630202 -0.496999 -1.101656 -0.168921 -0.295159 -0.587401 0.369033 -0.266325 0.604469 2 1
230 -1.085049 0.879566 0.442593 0.128917 0.393498 0.531555 0.392194 1.418515 0.891015 -0.348926 -0.756201 -0.838584 -0.015971 3 1
231 -0.352258 0.556982 0.530520 0.443818 0.300921 0.032128 -0.797384 -0.573532 0.398084 0.328875 -0.274964 -1.300920 0.254456 0 1
232 -1.190363 0.797356 0.758472 0.587917 0.890540 0.471925 0.105793 0.680721 0.230834 -0.150709 -0.816744 -0.470618 0.371198 4 1
233 -0.651003 -0.586618 1.326854 -0.451354 0.507113 0.165474 -0.919675 -0.448249 -1.310940 -1.372737 0.406029 -1.414627 -0.434858 2 1
234 -1.459511 -0.516281 1.631699 -1.141842 0.584621 -0.458541 -1.428877 -0.934556 -0.216455 -0.049794 0.095580 0.387068 0.693730 2 1
235 -0.726984 0.702447 0.798069 -0.320660 0.530902 1.019988 0.144995 0.207847 0.039592 0.220761 0.762941 0.575034 0.671517 4 1
236 -0.300986 -0.404923 0.715406 0.245380 -0.427936 -0.334843 -0.228084 -0.330898 -0.674327 0.199560 0.827455 0.016433 0.866789 0 1
237 -0.736244 0.088611 0.910051 0.437100 0.258256 0.363828 -0.415290 -0.717445 -0.012727 0.436925 -0.786954 -1.217376 0.352825 2 1
238 0.610473 -2.664315 1.303652 -2.022376 1.500032 -1.280926 -1.249533 0.432111 -0.768558 0.291156 -0.092312 0.053770 -0.401166 2 1
239 -2.045424 -2.954642 0.302601 -0.868092 -1.038134 -1.230777 0.514329 0.057591 -1.023895 0.275395 -1.450282 0.386242 0.318763 2 1
240 0.329793 -1.367570 -1.454329 -0.207924 -0.723609 -0.149025 -0.085298 -0.011595 -0.240239 -0.009120 -0.325229 -0.025722 0.114182 1 1
241 -1.919591 1.382172 -0.134161 0.837967 -0.687780 0.944303 -0.258652 -0.742178 0.386031 -1.178099 -1.843543 -0.710556 -0.318561 3 1
242 -2.087669 1.400006 -0.494964 0.451717 -0.759188 0.736625 0.133121 -0.196031 1.121231 0.474128 -0.345937 -0.409324 -0.442069 4 1
243 -2.131652 0.439305 -0.612226 0.854126 -0.494550 0.825299 0.301373 -0.018964 0.690556 -0.078762 -0.709495 -0.075857 -0.418656 4 1
244 -1.611989 -0.756403 -0.410917 1.075909 0.297336 -1.317576 1.115011 -0.467065 -0.768378 1.615499 1.611125 -1.018782 -1.798744 2 1
245 -0.142010 0.000190 -0.063461 -0.506353 -0.386942 -0.256144 0.270621 -1.497417 0.507892 0.456828 -0.431169 -0.978417 0.015849 2 1
246 -1.263975 -1.168117 -1.396090 -0.312016 1.862268 1.400290 0.646060 -0.686864 0.418524 -0.069926 -0.653856 -0.853617 -0.106814 2 1
247 -0.507700 0.899825 1.510153 1.083642 2.081451 0.589016 0.901321 0.658808 0.152596 0.176442 -0.447633 0.287838 0.650479 4 1
248 -0.159768 0.518093 2.197018 0.698491 0.476336 -2.014255 -1.614667 -0.397282 -1.781932 -0.208894 1.650551 -0.771436 -0.987237 0 1
249 -1.037899 1.016712 2.774230 0.665468 -0.385673 0.587263 -0.121609 -0.331379 0.622484 -0.387131 -0.276584 0.218207 1.689216 0 1
250 -0.526923 -1.169944 0.474875 -0.789231 0.369827 -0.537003 -1.089843 -0.173366 -0.023237 -0.142334 0.740065 0.813114 0.872556 2 1
251 -0.770856 -1.024349 -0.019140 -0.097521 0.092703 0.369242 -0.273901 0.190740 -0.074032 0.113055 0.140291 -0.696275 0.166679 2 1
252 -0.905458 -0.790575 0.206164 -0.723816 -0.444860 0.107833 -0.734514 -0.533865 -0.634334 0.320526 0.088428 -0.348210 0.347201 2 1
253 -1.378235 -0.338405 0.016815 -0.394563 0.034043 1.023865 -0.303960 -1.316121 0.198697 0.670577 0.809574 0.580565 0.056004 2 1
254 -0.199959 -2.035812 -0.904507 -1.511975 -0.437843 0.262972 -1.943788 -1.963300 -2.256227 0.354369 -0.039829 0.882325 0.139307 2 1

255 rows × 15 columns

In [74]:
# Stacked bar chart: composition of each cluster by the 'chosen' target.
# (Renamed the local from `stacked` to avoid confusion with the stacked=True kwarg.)
counts = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = counts.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
Out[74]:
<matplotlib.axes._subplots.AxesSubplot at 0x1e82aec1940>
In [ ]:
 
In [ ]:
 
In [ ]: